# Relevant libraries:
library(survival)
library(descr)
# NOTE: MASS must be attached BEFORE tidyverse, otherwise MASS::select()
# masks dplyr::select() and the select() calls below fail on data frames.
library(MASS)
library(PAmeasures)
library(tidyverse)
library(kableExtra)
library(pec)
library(gtsummary)
library(SurvMetrics)
library(gt)
# Project helper functions (updated pam.survreg) live in functions/
data_directory <- "functions/"
source(paste0(data_directory, "pam.survreg.update.R"))
# Rotterdam breast-cancer cohort shipped with the survival package
data(cancer, package = "survival")
n <- nrow(rotterdam)
# train-test split: hold out 15% of patients for evaluation
test_p <- 0.15
set.seed(215) #cw added — makes the random split reproducible
# floor() makes the implicit truncation of sample()'s size argument explicit
test_indx <- sample(n, floor(n * test_p))
rotterdam_test <- rotterdam[test_indx, ]
# Negative indexing takes the complement of the test rows directly
rotterdam_train <- rotterdam[-test_indx, ]
# dplyr::select is qualified explicitly: MASS is also attached and its
# select() would otherwise mask dplyr's when loaded after tidyverse.
rot.cont <- rotterdam %>% dplyr::select(age, nodes, pgr, er, rtime, dtime)
rot.cat <- rotterdam %>% dplyr::select(year, meno, size, hormon, chemo, recur, death, grade)
# Table 1: one summary-statistic column per continuous covariate.
# Build one tbl_summary per statistic name (the name is spliced into a
# "{stat}" glue string), then merge the five tables side by side so each
# statistic becomes its own column; spanning headers and footnotes are
# stripped afterwards so only the per-statistic column labels remain.
tbl1 <- c("mean", "sd", "median", "min", "max") %>%
lapply(
function(.x) {
tbl_summary(
data = rot.cont,
statistic = everything() ~ paste0("{", .x, "}"),
include = c(age, nodes, pgr, er, rtime, dtime),
missing = "no"
) %>%
# Label each stat column with the statistic's name in bold;
# glue::glue("**{.x}**") captures .x from this function's environment.
modify_header(all_stat_cols() ~ glue::glue("**{.x}**"))
}
) %>%
# Merge tables to get a column for each summary statistic
tbl_merge() %>%
modify_spanning_header(everything() ~ NA) %>%
modify_footnote(everything() ~ NA) %>%
modify_caption("**Table 1: Continuous Patient Characteristics**")
#gt::gtsave(as_gt(tbl1), file = file.path(getwd(), "tbl1.png"))
tbl1
| Characteristic | mean | sd | median | min | max |
|---|---|---|---|---|---|
| age | 55 | 13 | 54 | 24 | 90 |
| nodes | 3 | 4 | 1 | 0 | 34 |
| pgr | 162 | 291 | 41 | 0 | 5,004 |
| er | 167 | 272 | 61 | 0 | 3,275 |
| rtime | 2,098 | 1,398 | 1,940 | 36 | 7,043 |
| dtime | 2,605 | 1,298 | 2,639 | 36 | 7,043 |
# Table 2: descriptive summary of the categorical / binary covariates,
# with mean (SD) for any continuous columns and n / N (%) for levels.
tbl2 <- tbl_summary(
  rot.cat,
  statistic = list(
    all_continuous() ~ "{mean} ({sd})",
    all_categorical() ~ "{n} / {N} ({p}%)"
  ),
  digits = all_continuous() ~ 2,
  missing_text = "(Missing)"
) %>%
  modify_caption("**Table 2: Categorical Patient Characteristics**")
#gt::gtsave(as_gt(tbl2), file = file.path(getwd(), "tbl2.png"))
tbl2
| Characteristic | N = 2,982¹ |
|---|---|
| year | 1,988.16 (3.04) |
| meno | 1,670 / 2,982 (56%) |
| size | |
| &nbsp;&nbsp;&nbsp;&nbsp;<=20 | 1,387 / 2,982 (47%) |
| &nbsp;&nbsp;&nbsp;&nbsp;20-50 | 1,291 / 2,982 (43%) |
| &nbsp;&nbsp;&nbsp;&nbsp;>50 | 304 / 2,982 (10%) |
| hormon | 339 / 2,982 (11%) |
| chemo | 580 / 2,982 (19%) |
| recur | 1,518 / 2,982 (51%) |
| death | 1,272 / 2,982 (43%) |
| grade | |
| &nbsp;&nbsp;&nbsp;&nbsp;2 | 794 / 2,982 (27%) |
| &nbsp;&nbsp;&nbsp;&nbsp;3 | 2,188 / 2,982 (73%) |
| ¹ Mean (SD); n / N (%) | |
# Kaplan-Meier estimate of overall survival (time to death, any cause)
# for the full cohort, with log-log transformed confidence intervals
km <- survfit(
  Surv(dtime, death) ~ 1,
  data = rotterdam,
  conf.type = "log-log"
)
summary(km)
## Call: survfit(formula = Surv(dtime, death) ~ 1, data = rotterdam, conf.type = "log-log")
##
## time n.risk n.event survival std.err lower 95% CI upper 95% CI
## 45 2981 1 1.000 0.000335 0.998 1.000
## 64 2980 1 0.999 0.000474 0.997 1.000
## 74 2979 1 0.999 0.000581 0.997 1.000
## 97 2978 1 0.999 0.000670 0.996 0.999
## 101 2977 2 0.998 0.000821 0.996 0.999
## 141 2972 1 0.998 0.000887 0.995 0.999
## 151 2971 2 0.997 0.001005 0.994 0.998
## 153 2969 1 0.997 0.001059 0.994 0.998
## 164 2967 1 0.996 0.001111 0.993 0.998
## 177 2965 1 0.996 0.001160 0.993 0.998
## 186 2964 1 0.996 0.001208 0.992 0.997
## 198 2963 1 0.995 0.001253 0.992 0.997
## 204 2962 1 0.995 0.001297 0.992 0.997
## 210 2961 1 0.995 0.001339 0.991 0.997
## 212 2960 1 0.994 0.001380 0.991 0.996
## 215 2959 1 0.994 0.001420 0.990 0.996
## 217 2958 2 0.993 0.001497 0.990 0.996
## 223 2956 1 0.993 0.001533 0.989 0.995
## 228 2955 1 0.993 0.001569 0.989 0.995
## 231 2954 1 0.992 0.001604 0.988 0.995
## 232 2953 1 0.992 0.001639 0.988 0.995
## 239 2952 1 0.992 0.001672 0.988 0.994
## 245 2951 1 0.991 0.001705 0.987 0.994
## 251 2950 1 0.991 0.001737 0.987 0.994
## 255 2949 1 0.991 0.001769 0.986 0.993
## 256 2948 1 0.990 0.001800 0.986 0.993
## 261 2947 1 0.990 0.001830 0.986 0.993
## 266 2946 1 0.990 0.001860 0.985 0.993
## 272 2945 1 0.989 0.001890 0.985 0.992
## 273 2944 1 0.989 0.001919 0.984 0.992
## 274 2943 1 0.989 0.001947 0.984 0.992
## 281 2942 1 0.988 0.001975 0.984 0.992
## 287 2941 1 0.988 0.002003 0.983 0.991
## 296 2940 1 0.988 0.002030 0.983 0.991
## 298 2938 1 0.987 0.002057 0.982 0.991
## 299 2937 1 0.987 0.002084 0.982 0.990
## 306 2936 1 0.987 0.002110 0.982 0.990
## 307 2935 1 0.986 0.002136 0.981 0.990
## 309 2934 1 0.986 0.002162 0.981 0.990
## 310 2933 1 0.986 0.002187 0.981 0.989
## 316 2932 1 0.985 0.002212 0.980 0.989
## 319 2931 1 0.985 0.002236 0.980 0.989
## 325 2930 1 0.985 0.002261 0.979 0.988
## 332 2929 2 0.984 0.002309 0.979 0.988
## 334 2927 1 0.984 0.002332 0.978 0.988
## 338 2926 1 0.983 0.002356 0.978 0.987
## 342 2925 1 0.983 0.002379 0.978 0.987
## 345 2924 1 0.983 0.002401 0.977 0.987
## 349 2922 1 0.982 0.002424 0.977 0.986
## 354 2920 1 0.982 0.002446 0.976 0.986
## 355 2919 1 0.982 0.002469 0.976 0.986
## 358 2918 1 0.981 0.002491 0.976 0.985
## 359 2917 1 0.981 0.002512 0.975 0.985
## 361 2916 2 0.980 0.002555 0.974 0.985
## 366 2914 1 0.980 0.002576 0.974 0.984
## 368 2913 1 0.980 0.002597 0.974 0.984
## 372 2912 1 0.979 0.002618 0.973 0.984
## 373 2911 2 0.978 0.002659 0.973 0.983
## 374 2909 1 0.978 0.002680 0.972 0.983
## 378 2908 1 0.978 0.002700 0.972 0.983
## 379 2907 1 0.977 0.002720 0.971 0.982
## 381 2906 1 0.977 0.002739 0.971 0.982
## 382 2905 1 0.977 0.002759 0.971 0.982
## 383 2904 1 0.976 0.002779 0.970 0.981
## 389 2903 1 0.976 0.002798 0.970 0.981
## 390 2902 1 0.976 0.002817 0.970 0.981
## 393 2901 1 0.975 0.002836 0.969 0.980
## 394 2900 1 0.975 0.002855 0.969 0.980
## 397 2899 1 0.975 0.002874 0.968 0.980
## 404 2898 1 0.974 0.002892 0.968 0.980
## 407 2897 1 0.974 0.002911 0.968 0.979
## 413 2896 1 0.974 0.002929 0.967 0.979
## 418 2895 1 0.973 0.002947 0.967 0.979
## 426 2894 1 0.973 0.002966 0.967 0.978
## 433 2893 1 0.973 0.002984 0.966 0.978
## 435 2892 1 0.972 0.003001 0.966 0.978
## 437 2890 1 0.972 0.003019 0.966 0.977
## 440 2889 2 0.971 0.003054 0.965 0.977
## 441 2887 1 0.971 0.003072 0.964 0.977
## 443 2886 3 0.970 0.003123 0.963 0.976
## 445 2883 1 0.970 0.003140 0.963 0.975
## 446 2882 1 0.969 0.003157 0.963 0.975
## 447 2881 1 0.969 0.003174 0.962 0.975
## 448 2880 1 0.969 0.003191 0.962 0.974
## 449 2879 1 0.968 0.003207 0.961 0.974
## 450 2878 1 0.968 0.003224 0.961 0.974
## 451 2877 1 0.968 0.003240 0.961 0.974
## 459 2876 2 0.967 0.003273 0.960 0.973
## 464 2874 2 0.966 0.003305 0.959 0.972
## 465 2872 1 0.966 0.003321 0.959 0.972
## 466 2871 2 0.965 0.003352 0.958 0.971
## 468 2869 1 0.965 0.003368 0.958 0.971
## 470 2868 1 0.965 0.003384 0.957 0.971
## 472 2867 1 0.964 0.003399 0.957 0.970
## 477 2866 1 0.964 0.003415 0.957 0.970
## 479 2865 1 0.964 0.003430 0.956 0.970
## 485 2863 1 0.963 0.003445 0.956 0.970
## 486 2862 1 0.963 0.003460 0.956 0.969
## 487 2861 1 0.963 0.003476 0.955 0.969
## 490 2860 1 0.962 0.003491 0.955 0.969
## 492 2859 2 0.962 0.003520 0.954 0.968
## 493 2857 1 0.961 0.003535 0.954 0.968
## 494 2856 1 0.961 0.003550 0.953 0.967
## 497 2855 1 0.961 0.003565 0.953 0.967
## 500 2854 1 0.960 0.003579 0.953 0.967
## 503 2853 1 0.960 0.003594 0.952 0.966
## 518 2852 1 0.960 0.003608 0.952 0.966
## 520 2851 1 0.959 0.003623 0.952 0.966
## 530 2850 1 0.959 0.003637 0.951 0.966
## 533 2848 1 0.959 0.003651 0.951 0.965
## 539 2847 1 0.958 0.003666 0.950 0.965
## 540 2846 1 0.958 0.003680 0.950 0.965
## 544 2845 1 0.958 0.003694 0.950 0.964
## 549 2844 1 0.957 0.003708 0.949 0.964
## 550 2843 1 0.957 0.003722 0.949 0.964
## 552 2842 1 0.957 0.003736 0.949 0.963
## 557 2839 1 0.956 0.003749 0.948 0.963
## 558 2838 1 0.956 0.003763 0.948 0.963
## 560 2837 2 0.955 0.003791 0.947 0.962
## 564 2835 1 0.955 0.003804 0.947 0.962
## 567 2834 1 0.955 0.003818 0.946 0.962
## 569 2833 1 0.954 0.003831 0.946 0.961
## 570 2832 1 0.954 0.003845 0.946 0.961
## 574 2831 2 0.953 0.003871 0.945 0.960
## 576 2829 1 0.953 0.003885 0.945 0.960
## 581 2826 2 0.952 0.003911 0.944 0.959
## 582 2824 1 0.952 0.003924 0.944 0.959
## 583 2823 3 0.951 0.003963 0.942 0.958
## 584 2820 2 0.950 0.003989 0.942 0.957
## 585 2818 1 0.950 0.004002 0.941 0.957
## 587 2817 1 0.950 0.004015 0.941 0.957
## 589 2816 1 0.949 0.004027 0.941 0.957
## 590 2815 1 0.949 0.004040 0.940 0.956
## 592 2814 1 0.949 0.004053 0.940 0.956
## 596 2813 1 0.948 0.004065 0.940 0.956
## 598 2812 2 0.948 0.004090 0.939 0.955
## 601 2810 2 0.947 0.004115 0.938 0.954
## 603 2807 1 0.947 0.004127 0.938 0.954
## 606 2806 1 0.946 0.004140 0.937 0.954
## 607 2805 1 0.946 0.004152 0.937 0.953
## 608 2804 1 0.945 0.004164 0.937 0.953
## 609 2803 1 0.945 0.004176 0.936 0.953
## 610 2802 2 0.944 0.004200 0.936 0.952
## 618 2800 1 0.944 0.004212 0.935 0.952
## 619 2799 1 0.944 0.004224 0.935 0.952
## 621 2797 1 0.943 0.004236 0.935 0.951
## 627 2796 1 0.943 0.004248 0.934 0.951
## 631 2795 1 0.943 0.004260 0.934 0.951
## 633 2794 1 0.942 0.004272 0.933 0.950
## 636 2793 1 0.942 0.004284 0.933 0.950
## 639 2792 1 0.942 0.004295 0.933 0.950
## 640 2791 1 0.941 0.004307 0.932 0.949
## 644 2789 1 0.941 0.004319 0.932 0.949
## 649 2788 1 0.941 0.004330 0.932 0.949
## 650 2787 2 0.940 0.004354 0.931 0.948
## 651 2785 2 0.939 0.004376 0.930 0.947
## 655 2783 1 0.939 0.004388 0.930 0.947
## 656 2781 1 0.939 0.004399 0.930 0.947
## 659 2780 1 0.938 0.004411 0.929 0.946
## 662 2779 1 0.938 0.004422 0.929 0.946
## 666 2778 1 0.938 0.004433 0.928 0.946
## 674 2777 1 0.937 0.004445 0.928 0.946
## 677 2776 1 0.937 0.004456 0.928 0.945
## 679 2775 2 0.936 0.004478 0.927 0.945
## 682 2773 1 0.936 0.004489 0.927 0.944
## 683 2772 2 0.935 0.004511 0.926 0.944
## 687 2770 1 0.935 0.004522 0.926 0.943
## 689 2769 1 0.935 0.004533 0.925 0.943
## 690 2768 1 0.934 0.004544 0.925 0.943
## 692 2767 1 0.934 0.004555 0.924 0.942
## 699 2765 2 0.933 0.004577 0.924 0.942
## 702 2763 1 0.933 0.004587 0.923 0.941
## 706 2762 1 0.933 0.004598 0.923 0.941
## 710 2761 1 0.932 0.004609 0.923 0.941
## 711 2760 2 0.932 0.004630 0.922 0.940
## 712 2758 1 0.931 0.004641 0.922 0.940
## 714 2757 1 0.931 0.004652 0.921 0.940
## 715 2756 1 0.931 0.004662 0.921 0.939
## 717 2755 1 0.930 0.004673 0.921 0.939
## 719 2754 2 0.930 0.004694 0.920 0.938
## 720 2752 1 0.929 0.004704 0.919 0.938
## 722 2751 1 0.929 0.004714 0.919 0.938
## 723 2750 2 0.928 0.004735 0.918 0.937
## 724 2748 2 0.928 0.004756 0.918 0.936
## 726 2746 2 0.927 0.004776 0.917 0.936
## 730 2744 1 0.927 0.004786 0.917 0.935
## 732 2743 2 0.926 0.004807 0.916 0.935
## 733 2741 1 0.926 0.004817 0.916 0.934
## 736 2740 1 0.925 0.004827 0.915 0.934
## 737 2739 1 0.925 0.004837 0.915 0.934
## 740 2738 1 0.925 0.004847 0.914 0.934
## 741 2737 1 0.924 0.004857 0.914 0.933
## 743 2736 1 0.924 0.004867 0.914 0.933
## 744 2735 1 0.924 0.004877 0.913 0.933
## 746 2734 1 0.923 0.004887 0.913 0.932
## 748 2733 1 0.923 0.004897 0.913 0.932
## 750 2732 1 0.923 0.004906 0.912 0.932
## 751 2731 2 0.922 0.004926 0.912 0.931
## 753 2729 3 0.921 0.004955 0.911 0.930
## 758 2726 4 0.919 0.004994 0.909 0.929
## 762 2722 1 0.919 0.005003 0.909 0.928
## 765 2721 1 0.919 0.005013 0.908 0.928
## 766 2720 1 0.918 0.005022 0.908 0.928
## 767 2719 1 0.918 0.005032 0.908 0.927
## 769 2718 1 0.918 0.005041 0.907 0.927
## 770 2717 1 0.917 0.005051 0.907 0.927
## 776 2716 1 0.917 0.005060 0.907 0.926
## 781 2715 2 0.916 0.005079 0.906 0.926
## 784 2713 1 0.916 0.005088 0.906 0.926
## 785 2712 2 0.915 0.005107 0.905 0.925
## 789 2710 1 0.915 0.005116 0.904 0.925
## 791 2709 2 0.914 0.005135 0.904 0.924
## 792 2707 1 0.914 0.005144 0.903 0.924
## 793 2706 2 0.913 0.005162 0.903 0.923
## 797 2703 1 0.913 0.005171 0.902 0.923
## 799 2702 1 0.913 0.005181 0.902 0.922
## 801 2701 1 0.912 0.005190 0.902 0.922
## 803 2700 1 0.912 0.005199 0.901 0.922
## 805 2699 2 0.911 0.005217 0.901 0.921
## 809 2697 1 0.911 0.005226 0.900 0.921
## 810 2695 1 0.911 0.005235 0.900 0.920
## 811 2694 1 0.910 0.005244 0.900 0.920
## 814 2693 1 0.910 0.005253 0.899 0.920
## 815 2692 1 0.910 0.005262 0.899 0.919
## 816 2691 1 0.909 0.005270 0.898 0.919
## 817 2690 1 0.909 0.005279 0.898 0.919
## 818 2689 1 0.909 0.005288 0.898 0.919
## 819 2688 1 0.908 0.005297 0.897 0.918
## 820 2687 1 0.908 0.005306 0.897 0.918
## 821 2686 1 0.908 0.005315 0.897 0.918
## 822 2685 2 0.907 0.005332 0.896 0.917
## 823 2683 1 0.907 0.005341 0.896 0.917
## 824 2682 1 0.906 0.005350 0.895 0.916
## 826 2681 2 0.906 0.005367 0.895 0.916
## 827 2679 1 0.905 0.005376 0.894 0.915
## 833 2678 1 0.905 0.005384 0.894 0.915
## 834 2677 1 0.905 0.005393 0.893 0.915
## 835 2676 1 0.904 0.005401 0.893 0.914
## 841 2675 1 0.904 0.005410 0.893 0.914
## 842 2674 1 0.904 0.005418 0.892 0.914
## 845 2673 2 0.903 0.005435 0.892 0.913
## 846 2671 1 0.903 0.005444 0.891 0.913
## 850 2670 2 0.902 0.005461 0.891 0.912
## 852 2667 2 0.901 0.005478 0.890 0.911
## 854 2664 1 0.901 0.005486 0.890 0.911
## 857 2662 1 0.901 0.005494 0.889 0.911
## 864 2661 1 0.900 0.005503 0.889 0.910
## 866 2660 1 0.900 0.005511 0.889 0.910
## 869 2659 1 0.900 0.005519 0.888 0.910
## 872 2658 1 0.899 0.005528 0.888 0.910
## 881 2656 2 0.899 0.005544 0.887 0.909
## 885 2654 1 0.898 0.005552 0.887 0.909
## 886 2653 1 0.898 0.005561 0.886 0.908
## 887 2652 3 0.897 0.005585 0.885 0.907
## 890 2649 2 0.896 0.005601 0.885 0.907
## 891 2647 2 0.895 0.005618 0.884 0.906
## 892 2645 1 0.895 0.005626 0.884 0.906
## 893 2644 1 0.895 0.005634 0.883 0.905
## 895 2643 3 0.894 0.005658 0.882 0.904
## 897 2640 1 0.893 0.005666 0.882 0.904
## 898 2639 2 0.893 0.005682 0.881 0.903
## 900 2637 1 0.892 0.005689 0.881 0.903
## 902 2636 1 0.892 0.005697 0.880 0.903
## 903 2635 2 0.891 0.005713 0.880 0.902
## 904 2633 1 0.891 0.005721 0.879 0.902
## 905 2631 2 0.890 0.005737 0.879 0.901
## 907 2629 1 0.890 0.005744 0.878 0.901
## 910 2628 1 0.890 0.005752 0.878 0.900
## 912 2627 1 0.889 0.005760 0.878 0.900
## 915 2625 1 0.889 0.005768 0.877 0.900
## 918 2623 3 0.888 0.005791 0.876 0.899
## 919 2620 1 0.888 0.005799 0.876 0.899
## 921 2619 2 0.887 0.005814 0.875 0.898
## 922 2617 2 0.886 0.005829 0.874 0.897
## 923 2615 1 0.886 0.005837 0.874 0.897
## 925 2614 1 0.886 0.005845 0.874 0.897
## 927 2613 1 0.885 0.005852 0.873 0.896
## 928 2612 2 0.885 0.005867 0.873 0.896
## 929 2610 1 0.884 0.005875 0.872 0.895
## 931 2609 1 0.884 0.005882 0.872 0.895
## 936 2608 1 0.884 0.005890 0.872 0.895
## 937 2607 1 0.883 0.005897 0.871 0.894
## 942 2605 1 0.883 0.005905 0.871 0.894
## 944 2604 1 0.883 0.005912 0.870 0.894
## 945 2603 1 0.882 0.005920 0.870 0.893
## 946 2602 2 0.882 0.005935 0.869 0.893
## 947 2600 1 0.881 0.005942 0.869 0.892
## 949 2599 1 0.881 0.005949 0.869 0.892
## 952 2598 1 0.881 0.005957 0.868 0.892
## 953 2597 3 0.880 0.005979 0.867 0.891
## 955 2594 1 0.879 0.005986 0.867 0.890
## 957 2593 3 0.878 0.006008 0.866 0.889
## 959 2590 1 0.878 0.006015 0.866 0.889
## 965 2589 1 0.878 0.006022 0.865 0.889
## 966 2588 1 0.877 0.006029 0.865 0.888
## 967 2587 1 0.877 0.006037 0.864 0.888
## 969 2586 2 0.876 0.006051 0.864 0.888
## 972 2584 2 0.875 0.006065 0.863 0.887
## 973 2582 2 0.875 0.006080 0.862 0.886
## 976 2580 2 0.874 0.006094 0.862 0.886
## 977 2578 1 0.874 0.006101 0.861 0.885
## 978 2577 1 0.873 0.006108 0.861 0.885
## 980 2576 1 0.873 0.006115 0.861 0.885
## 981 2575 1 0.873 0.006122 0.860 0.884
## 982 2573 1 0.872 0.006129 0.860 0.884
## 984 2572 1 0.872 0.006136 0.860 0.884
## 989 2571 1 0.872 0.006143 0.859 0.883
## 990 2570 1 0.871 0.006150 0.859 0.883
## 991 2569 1 0.871 0.006157 0.858 0.883
## 994 2568 1 0.871 0.006164 0.858 0.882
## 995 2567 1 0.870 0.006171 0.858 0.882
## 996 2566 1 0.870 0.006178 0.857 0.882
## 998 2564 1 0.870 0.006184 0.857 0.881
## 999 2563 5 0.868 0.006219 0.855 0.880
## 1002 2558 1 0.868 0.006226 0.855 0.879
## 1007 2556 1 0.867 0.006232 0.855 0.879
## 1008 2555 1 0.867 0.006239 0.854 0.879
## 1009 2554 4 0.866 0.006266 0.853 0.877
## 1010 2550 1 0.865 0.006273 0.852 0.877
## 1016 2549 1 0.865 0.006280 0.852 0.877
## 1017 2548 1 0.865 0.006286 0.852 0.876
## 1020 2547 1 0.864 0.006293 0.851 0.876
## 1021 2546 1 0.864 0.006300 0.851 0.876
## 1024 2545 3 0.863 0.006320 0.850 0.875
## 1025 2542 1 0.863 0.006326 0.850 0.874
## 1026 2540 1 0.862 0.006333 0.849 0.874
## 1030 2539 2 0.862 0.006346 0.849 0.874
## 1031 2537 1 0.861 0.006353 0.848 0.873
## 1032 2536 1 0.861 0.006359 0.848 0.873
## 1034 2535 1 0.861 0.006366 0.848 0.873
## 1037 2534 1 0.860 0.006372 0.847 0.872
## 1043 2532 1 0.860 0.006379 0.847 0.872
## 1048 2531 3 0.859 0.006398 0.846 0.871
## 1049 2528 1 0.859 0.006405 0.845 0.871
## 1052 2527 1 0.858 0.006411 0.845 0.870
## 1053 2526 1 0.858 0.006418 0.845 0.870
## 1054 2525 2 0.857 0.006431 0.844 0.869
## 1056 2523 1 0.857 0.006437 0.844 0.869
## 1057 2522 1 0.856 0.006444 0.843 0.869
## 1059 2521 1 0.856 0.006450 0.843 0.868
## 1062 2520 1 0.856 0.006456 0.843 0.868
## 1063 2519 1 0.855 0.006463 0.842 0.868
## 1068 2518 1 0.855 0.006469 0.842 0.867
## 1071 2515 1 0.855 0.006475 0.842 0.867
## 1073 2514 1 0.854 0.006482 0.841 0.867
## 1074 2513 1 0.854 0.006488 0.841 0.866
## 1076 2512 1 0.854 0.006494 0.841 0.866
## 1083 2511 1 0.853 0.006501 0.840 0.866
## 1087 2510 1 0.853 0.006507 0.840 0.865
## 1090 2509 1 0.853 0.006513 0.839 0.865
## 1091 2507 1 0.852 0.006520 0.839 0.865
## 1101 2506 2 0.852 0.006532 0.838 0.864
## 1102 2504 1 0.851 0.006538 0.838 0.864
## 1103 2503 1 0.851 0.006545 0.838 0.863
## 1104 2502 1 0.851 0.006551 0.837 0.863
## 1107 2500 1 0.850 0.006557 0.837 0.863
## 1108 2499 1 0.850 0.006563 0.837 0.862
## 1110 2498 1 0.850 0.006569 0.836 0.862
## 1114 2497 1 0.849 0.006576 0.836 0.862
## 1115 2496 2 0.849 0.006588 0.835 0.861
## 1119 2494 1 0.848 0.006594 0.835 0.861
## 1124 2493 2 0.848 0.006606 0.834 0.860
## 1125 2491 1 0.847 0.006612 0.834 0.860
## 1126 2490 1 0.847 0.006618 0.833 0.859
## 1128 2489 1 0.847 0.006625 0.833 0.859
## 1129 2488 2 0.846 0.006637 0.832 0.858
## 1131 2486 1 0.846 0.006643 0.832 0.858
## 1133 2485 1 0.845 0.006649 0.832 0.858
## 1139 2484 1 0.845 0.006655 0.831 0.857
## 1140 2483 2 0.844 0.006667 0.831 0.857
## 1142 2481 1 0.844 0.006673 0.830 0.856
## 1146 2480 1 0.844 0.006679 0.830 0.856
## 1147 2479 1 0.843 0.006685 0.830 0.856
## 1150 2478 1 0.843 0.006691 0.829 0.856
## 1154 2476 1 0.843 0.006697 0.829 0.855
## 1161 2475 1 0.842 0.006703 0.829 0.855
## 1162 2474 1 0.842 0.006709 0.828 0.855
## 1163 2473 1 0.842 0.006714 0.828 0.854
## 1165 2472 1 0.841 0.006720 0.828 0.854
## 1166 2471 2 0.840 0.006732 0.827 0.853
## 1167 2469 1 0.840 0.006738 0.826 0.853
## 1169 2468 1 0.840 0.006744 0.826 0.853
## 1174 2467 1 0.839 0.006750 0.826 0.852
## 1175 2466 2 0.839 0.006761 0.825 0.852
## 1177 2464 1 0.838 0.006767 0.825 0.851
## 1179 2463 1 0.838 0.006773 0.824 0.851
## 1183 2462 1 0.838 0.006779 0.824 0.851
## 1186 2461 1 0.837 0.006785 0.824 0.850
## 1187 2460 1 0.837 0.006790 0.823 0.850
## 1188 2459 1 0.837 0.006796 0.823 0.850
## 1189 2458 3 0.836 0.006813 0.822 0.849
## 1191 2455 1 0.835 0.006819 0.822 0.848
## 1192 2454 2 0.835 0.006831 0.821 0.848
## 1193 2452 1 0.834 0.006836 0.820 0.847
## 1198 2451 1 0.834 0.006842 0.820 0.847
## 1200 2450 2 0.833 0.006853 0.819 0.846
## 1204 2448 1 0.833 0.006859 0.819 0.846
## 1208 2447 1 0.833 0.006865 0.819 0.846
## 1216 2446 1 0.832 0.006870 0.818 0.845
## 1217 2445 1 0.832 0.006876 0.818 0.845
## 1218 2444 1 0.832 0.006881 0.818 0.845
## 1222 2443 1 0.831 0.006887 0.817 0.844
## 1224 2442 1 0.831 0.006893 0.817 0.844
## 1229 2440 1 0.831 0.006898 0.817 0.844
## 1231 2439 2 0.830 0.006909 0.816 0.843
## 1232 2437 2 0.829 0.006920 0.815 0.842
## 1233 2434 1 0.829 0.006926 0.815 0.842
## 1235 2433 1 0.829 0.006931 0.815 0.842
## 1237 2432 1 0.828 0.006937 0.814 0.841
## 1238 2431 1 0.828 0.006942 0.814 0.841
## 1244 2430 1 0.828 0.006948 0.813 0.841
## 1247 2429 1 0.827 0.006953 0.813 0.840
## 1248 2427 2 0.827 0.006964 0.812 0.840
## 1249 2425 1 0.826 0.006970 0.812 0.839
## 1250 2424 1 0.826 0.006975 0.812 0.839
## 1256 2423 1 0.826 0.006981 0.811 0.839
## 1265 2422 1 0.825 0.006986 0.811 0.838
## 1267 2421 2 0.824 0.006997 0.810 0.838
## 1268 2419 3 0.823 0.007013 0.809 0.837
## 1269 2415 2 0.823 0.007024 0.809 0.836
## 1275 2413 1 0.822 0.007029 0.808 0.836
## 1287 2411 3 0.821 0.007045 0.807 0.835
## 1289 2408 2 0.821 0.007056 0.806 0.834
## 1294 2406 1 0.820 0.007061 0.806 0.834
## 1296 2405 2 0.820 0.007072 0.805 0.833
## 1303 2401 1 0.819 0.007077 0.805 0.833
## 1308 2400 2 0.819 0.007088 0.804 0.832
## 1310 2398 1 0.818 0.007093 0.804 0.832
## 1315 2396 1 0.818 0.007098 0.804 0.831
## 1317 2395 1 0.818 0.007104 0.803 0.831
## 1319 2394 3 0.817 0.007119 0.802 0.830
## 1322 2391 1 0.816 0.007124 0.802 0.830
## 1323 2390 1 0.816 0.007130 0.802 0.829
## 1324 2389 1 0.816 0.007135 0.801 0.829
## 1327 2387 1 0.815 0.007140 0.801 0.829
## 1331 2386 1 0.815 0.007145 0.800 0.828
## 1334 2383 1 0.815 0.007150 0.800 0.828
## 1335 2382 1 0.814 0.007156 0.800 0.828
## 1344 2379 1 0.814 0.007161 0.799 0.827
## 1346 2377 2 0.813 0.007171 0.799 0.827
## 1347 2375 2 0.813 0.007181 0.798 0.826
## 1350 2373 1 0.812 0.007186 0.798 0.826
## 1351 2372 1 0.812 0.007192 0.797 0.825
## 1352 2371 1 0.812 0.007197 0.797 0.825
## 1360 2369 2 0.811 0.007207 0.796 0.824
## 1361 2367 4 0.809 0.007227 0.795 0.823
## 1363 2363 1 0.809 0.007232 0.794 0.823
## 1368 2362 1 0.809 0.007237 0.794 0.823
## 1372 2361 1 0.808 0.007242 0.794 0.822
## 1380 2360 1 0.808 0.007247 0.793 0.822
## 1386 2359 1 0.808 0.007252 0.793 0.822
## 1390 2358 1 0.807 0.007257 0.793 0.821
## 1391 2357 1 0.807 0.007262 0.792 0.821
## 1392 2356 2 0.806 0.007272 0.792 0.820
## 1393 2354 2 0.806 0.007282 0.791 0.820
## 1395 2352 2 0.805 0.007292 0.790 0.819
## 1399 2350 1 0.805 0.007297 0.790 0.819
## 1402 2348 1 0.804 0.007302 0.790 0.818
## 1405 2347 1 0.804 0.007307 0.789 0.818
## 1406 2346 1 0.804 0.007312 0.789 0.818
## 1408 2345 1 0.803 0.007317 0.788 0.817
## 1409 2343 1 0.803 0.007322 0.788 0.817
## 1413 2342 1 0.803 0.007327 0.788 0.817
## 1414 2341 1 0.802 0.007332 0.787 0.816
## 1421 2339 1 0.802 0.007336 0.787 0.816
## 1423 2337 1 0.802 0.007341 0.787 0.816
## 1424 2336 1 0.801 0.007346 0.786 0.815
## 1426 2335 1 0.801 0.007351 0.786 0.815
## 1429 2334 2 0.800 0.007361 0.785 0.814
## 1433 2332 1 0.800 0.007366 0.785 0.814
## 1434 2331 1 0.800 0.007370 0.785 0.814
## 1437 2330 1 0.799 0.007375 0.784 0.813
## 1445 2329 1 0.799 0.007380 0.784 0.813
## 1446 2328 1 0.798 0.007385 0.784 0.813
## 1448 2327 1 0.798 0.007390 0.783 0.812
## 1449 2326 1 0.798 0.007394 0.783 0.812
## 1450 2325 1 0.797 0.007399 0.782 0.812
## 1454 2323 3 0.796 0.007414 0.781 0.811
## 1455 2320 1 0.796 0.007418 0.781 0.810
## 1456 2319 1 0.796 0.007423 0.781 0.810
## 1457 2318 1 0.795 0.007428 0.780 0.810
## 1459 2317 1 0.795 0.007432 0.780 0.809
## 1460 2316 2 0.794 0.007442 0.779 0.809
## 1464 2314 1 0.794 0.007447 0.779 0.808
## 1470 2313 1 0.794 0.007451 0.779 0.808
## 1472 2312 1 0.793 0.007456 0.778 0.808
## 1473 2311 1 0.793 0.007461 0.778 0.807
## 1476 2309 2 0.792 0.007470 0.777 0.807
## 1477 2307 1 0.792 0.007475 0.777 0.806
## 1482 2305 1 0.792 0.007479 0.777 0.806
## 1486 2304 1 0.791 0.007484 0.776 0.806
## 1488 2302 1 0.791 0.007489 0.776 0.805
## 1493 2301 1 0.791 0.007493 0.775 0.805
## 1495 2299 2 0.790 0.007502 0.775 0.804
## 1499 2297 1 0.790 0.007507 0.774 0.804
## 1500 2296 1 0.789 0.007512 0.774 0.804
## 1501 2295 1 0.789 0.007516 0.774 0.803
## 1502 2294 1 0.789 0.007521 0.773 0.803
## 1513 2292 1 0.788 0.007525 0.773 0.802
## 1514 2291 1 0.788 0.007530 0.773 0.802
## 1515 2290 2 0.787 0.007539 0.772 0.801
## 1521 2287 1 0.787 0.007544 0.772 0.801
## 1522 2286 1 0.786 0.007548 0.771 0.801
## 1523 2285 1 0.786 0.007553 0.771 0.800
## 1526 2283 1 0.786 0.007557 0.771 0.800
## 1528 2280 1 0.785 0.007562 0.770 0.800
## 1530 2279 1 0.785 0.007566 0.770 0.799
## 1537 2278 2 0.784 0.007575 0.769 0.799
## 1538 2276 1 0.784 0.007580 0.769 0.798
## 1542 2274 2 0.783 0.007589 0.768 0.798
## 1543 2272 1 0.783 0.007593 0.768 0.797
## 1546 2270 1 0.783 0.007598 0.767 0.797
## 1547 2269 1 0.782 0.007602 0.767 0.797
## 1548 2268 1 0.782 0.007607 0.767 0.796
## 1553 2267 1 0.782 0.007611 0.766 0.796
## 1555 2266 1 0.781 0.007616 0.766 0.796
## 1562 2265 2 0.781 0.007625 0.765 0.795
## 1563 2262 1 0.780 0.007629 0.765 0.795
## 1566 2260 2 0.780 0.007638 0.764 0.794
## 1568 2258 1 0.779 0.007642 0.764 0.794
## 1571 2257 1 0.779 0.007647 0.763 0.793
## 1572 2256 1 0.779 0.007651 0.763 0.793
## 1575 2254 2 0.778 0.007660 0.762 0.792
## 1579 2252 1 0.777 0.007664 0.762 0.792
## 1582 2251 1 0.777 0.007669 0.762 0.792
## 1583 2250 1 0.777 0.007673 0.761 0.791
## 1592 2247 1 0.776 0.007677 0.761 0.791
## 1596 2244 2 0.776 0.007686 0.760 0.790
## 1598 2242 1 0.775 0.007691 0.760 0.790
## 1600 2241 1 0.775 0.007695 0.760 0.790
## 1601 2240 1 0.775 0.007699 0.759 0.789
## 1607 2237 2 0.774 0.007708 0.758 0.789
## 1609 2234 1 0.774 0.007712 0.758 0.788
## 1615 2233 1 0.773 0.007717 0.758 0.788
## 1618 2232 1 0.773 0.007721 0.757 0.788
## 1621 2230 1 0.773 0.007725 0.757 0.787
## 1623 2227 1 0.772 0.007729 0.757 0.787
## 1625 2225 1 0.772 0.007734 0.756 0.787
## 1629 2224 2 0.771 0.007742 0.756 0.786
## 1640 2220 1 0.771 0.007747 0.755 0.786
## 1644 2218 1 0.771 0.007751 0.755 0.785
## 1645 2217 1 0.770 0.007755 0.755 0.785
## 1649 2214 2 0.770 0.007764 0.754 0.784
## 1653 2212 2 0.769 0.007772 0.753 0.784
## 1655 2210 1 0.768 0.007777 0.753 0.783
## 1660 2208 1 0.768 0.007781 0.752 0.783
## 1665 2207 3 0.767 0.007794 0.751 0.782
## 1670 2204 1 0.767 0.007798 0.751 0.782
## 1672 2203 1 0.766 0.007802 0.751 0.781
## 1673 2202 2 0.766 0.007811 0.750 0.781
## 1674 2200 1 0.765 0.007815 0.750 0.780
## 1676 2199 2 0.765 0.007823 0.749 0.780
## 1677 2196 1 0.764 0.007827 0.749 0.779
## 1680 2192 1 0.764 0.007832 0.748 0.779
## 1682 2191 1 0.764 0.007836 0.748 0.779
## 1684 2190 1 0.763 0.007840 0.747 0.778
## 1686 2189 1 0.763 0.007844 0.747 0.778
## 1689 2186 1 0.763 0.007848 0.747 0.778
## 1692 2185 1 0.762 0.007852 0.746 0.777
## 1694 2184 2 0.762 0.007861 0.746 0.777
## 1699 2179 2 0.761 0.007869 0.745 0.776
## 1701 2177 1 0.760 0.007873 0.745 0.775
## 1707 2176 1 0.760 0.007877 0.744 0.775
## 1722 2170 1 0.760 0.007881 0.744 0.775
## 1723 2168 2 0.759 0.007890 0.743 0.774
## 1727 2166 2 0.758 0.007898 0.742 0.773
## 1729 2163 1 0.758 0.007902 0.742 0.773
## 1732 2162 1 0.758 0.007906 0.742 0.773
## 1733 2161 1 0.757 0.007910 0.741 0.772
## 1736 2160 1 0.757 0.007914 0.741 0.772
## 1738 2159 1 0.757 0.007919 0.741 0.772
## 1739 2157 2 0.756 0.007927 0.740 0.771
## 1742 2154 1 0.756 0.007931 0.740 0.771
## 1744 2153 1 0.755 0.007935 0.739 0.770
## 1745 2152 1 0.755 0.007939 0.739 0.770
## 1746 2150 1 0.755 0.007943 0.739 0.770
## 1750 2148 1 0.754 0.007947 0.738 0.769
## 1755 2145 1 0.754 0.007951 0.738 0.769
## 1756 2144 1 0.753 0.007955 0.737 0.769
## 1761 2143 1 0.753 0.007959 0.737 0.768
## 1763 2142 1 0.753 0.007963 0.737 0.768
## 1768 2140 2 0.752 0.007971 0.736 0.767
## 1772 2137 3 0.751 0.007984 0.735 0.766
## 1776 2132 1 0.751 0.007988 0.735 0.766
## 1777 2131 1 0.750 0.007992 0.734 0.766
## 1780 2128 1 0.750 0.007996 0.734 0.765
## 1791 2120 1 0.750 0.008000 0.733 0.765
## 1794 2117 3 0.749 0.008012 0.732 0.764
## 1795 2114 1 0.748 0.008016 0.732 0.763
## 1797 2112 1 0.748 0.008020 0.732 0.763
## 1800 2107 1 0.747 0.008024 0.731 0.763
## 1802 2104 1 0.747 0.008028 0.731 0.762
## 1804 2102 1 0.747 0.008032 0.731 0.762
## 1807 2101 1 0.746 0.008036 0.730 0.762
## 1812 2099 1 0.746 0.008040 0.730 0.761
## 1814 2095 3 0.745 0.008052 0.729 0.760
## 1816 2092 1 0.745 0.008056 0.728 0.760
## 1817 2091 1 0.744 0.008060 0.728 0.760
## 1818 2090 1 0.744 0.008064 0.728 0.759
## 1824 2085 1 0.744 0.008068 0.727 0.759
## 1831 2083 1 0.743 0.008072 0.727 0.759
## 1841 2073 1 0.743 0.008076 0.727 0.758
## 1844 2071 1 0.742 0.008080 0.726 0.758
## 1845 2069 1 0.742 0.008084 0.726 0.758
## 1849 2068 1 0.742 0.008088 0.725 0.757
## 1850 2067 2 0.741 0.008097 0.725 0.757
## 1851 2064 1 0.741 0.008101 0.724 0.756
## 1855 2062 1 0.740 0.008105 0.724 0.756
## 1857 2060 1 0.740 0.008109 0.724 0.755
## 1862 2059 1 0.740 0.008113 0.723 0.755
## 1867 2053 1 0.739 0.008117 0.723 0.755
## 1871 2049 1 0.739 0.008121 0.723 0.754
## 1872 2048 1 0.739 0.008125 0.722 0.754
## 1874 2047 1 0.738 0.008129 0.722 0.754
## 1877 2045 1 0.738 0.008133 0.721 0.753
## 1882 2042 1 0.737 0.008137 0.721 0.753
## 1885 2039 1 0.737 0.008141 0.721 0.753
## 1886 2038 1 0.737 0.008145 0.720 0.752
## 1888 2036 1 0.736 0.008149 0.720 0.752
## 1889 2034 1 0.736 0.008153 0.720 0.752
## 1891 2033 2 0.735 0.008161 0.719 0.751
## 1898 2030 1 0.735 0.008165 0.718 0.751
## 1899 2029 2 0.734 0.008173 0.718 0.750
## 1901 2027 1 0.734 0.008177 0.717 0.749
## 1904 2026 1 0.733 0.008181 0.717 0.749
## 1907 2025 1 0.733 0.008185 0.717 0.749
## 1908 2024 1 0.733 0.008189 0.716 0.748
## 1909 2023 1 0.732 0.008193 0.716 0.748
## 1912 2022 1 0.732 0.008197 0.716 0.748
## 1914 2020 1 0.732 0.008201 0.715 0.747
## 1925 2017 1 0.731 0.008205 0.715 0.747
## 1926 2016 1 0.731 0.008209 0.714 0.747
## 1927 2015 1 0.731 0.008213 0.714 0.746
## 1935 2011 1 0.730 0.008217 0.714 0.746
## 1936 2010 2 0.729 0.008224 0.713 0.745
## 1943 2003 1 0.729 0.008228 0.713 0.745
## 1944 2002 1 0.729 0.008232 0.712 0.744
## 1945 2000 1 0.728 0.008236 0.712 0.744
## 1954 1997 1 0.728 0.008240 0.711 0.744
## 1957 1995 1 0.728 0.008244 0.711 0.743
## 1958 1994 1 0.727 0.008248 0.711 0.743
## 1966 1992 1 0.727 0.008252 0.710 0.743
## 1968 1990 1 0.727 0.008256 0.710 0.742
## 1972 1988 1 0.726 0.008260 0.710 0.742
## 1973 1986 1 0.726 0.008264 0.709 0.742
## 1974 1985 1 0.725 0.008268 0.709 0.741
## 1977 1982 1 0.725 0.008272 0.708 0.741
## 1981 1981 1 0.725 0.008276 0.708 0.741
## 1982 1979 1 0.724 0.008280 0.708 0.740
## 1983 1977 1 0.724 0.008283 0.707 0.740
## 1986 1976 1 0.724 0.008287 0.707 0.739
## 1987 1974 1 0.723 0.008291 0.707 0.739
## 1990 1971 1 0.723 0.008295 0.706 0.739
## 1991 1969 2 0.722 0.008303 0.705 0.738
## 1992 1967 1 0.722 0.008307 0.705 0.738
## 1993 1966 1 0.721 0.008311 0.705 0.737
## 1994 1965 1 0.721 0.008315 0.704 0.737
## 1996 1963 1 0.721 0.008319 0.704 0.737
## 2002 1962 1 0.720 0.008322 0.704 0.736
## 2009 1959 1 0.720 0.008326 0.703 0.736
## 2011 1955 1 0.720 0.008330 0.703 0.736
## 2015 1952 1 0.719 0.008334 0.702 0.735
## 2018 1948 2 0.718 0.008342 0.702 0.734
## 2020 1946 1 0.718 0.008346 0.701 0.734
## 2022 1943 1 0.718 0.008350 0.701 0.734
## 2027 1941 1 0.717 0.008353 0.701 0.733
## 2028 1940 1 0.717 0.008357 0.700 0.733
## 2029 1939 1 0.717 0.008361 0.700 0.733
## 2030 1938 1 0.716 0.008365 0.699 0.732
## 2032 1936 1 0.716 0.008369 0.699 0.732
## 2033 1934 1 0.716 0.008373 0.699 0.732
## 2035 1933 1 0.715 0.008377 0.698 0.731
## 2046 1927 1 0.715 0.008380 0.698 0.731
## 2049 1926 1 0.714 0.008384 0.698 0.730
## 2050 1924 1 0.714 0.008388 0.697 0.730
## 2051 1922 1 0.714 0.008392 0.697 0.730
## 2052 1921 1 0.713 0.008396 0.696 0.729
## 2054 1920 2 0.713 0.008404 0.696 0.729
## 2057 1916 1 0.712 0.008407 0.695 0.728
## 2058 1915 1 0.712 0.008411 0.695 0.728
## 2060 1914 1 0.711 0.008415 0.695 0.728
## 2062 1912 1 0.711 0.008419 0.694 0.727
## 2063 1911 2 0.710 0.008427 0.693 0.726
## 2072 1904 2 0.710 0.008434 0.693 0.726
## 2074 1901 1 0.709 0.008438 0.692 0.725
## 2076 1900 1 0.709 0.008442 0.692 0.725
## 2080 1899 1 0.708 0.008446 0.692 0.725
## 2082 1896 1 0.708 0.008449 0.691 0.724
## 2083 1895 1 0.708 0.008453 0.691 0.724
## 2086 1890 2 0.707 0.008461 0.690 0.723
## 2088 1888 1 0.707 0.008465 0.690 0.723
## 2091 1886 1 0.706 0.008468 0.689 0.722
## 2098 1882 1 0.706 0.008472 0.689 0.722
## 2099 1879 1 0.705 0.008476 0.688 0.722
## 2101 1878 1 0.705 0.008480 0.688 0.721
## 2104 1875 1 0.705 0.008484 0.688 0.721
## 2105 1873 1 0.704 0.008487 0.687 0.721
## 2108 1872 1 0.704 0.008491 0.687 0.720
## 2112 1868 1 0.704 0.008495 0.687 0.720
## 2114 1867 1 0.703 0.008499 0.686 0.719
## 2115 1866 1 0.703 0.008503 0.686 0.719
## 2121 1860 1 0.702 0.008507 0.685 0.719
## 2122 1859 1 0.702 0.008510 0.685 0.718
## 2124 1858 1 0.702 0.008514 0.685 0.718
## 2128 1854 1 0.701 0.008518 0.684 0.718
## 2129 1853 1 0.701 0.008522 0.684 0.717
## 2133 1852 1 0.701 0.008526 0.683 0.717
## 2134 1850 1 0.700 0.008529 0.683 0.717
## 2135 1848 1 0.700 0.008533 0.683 0.716
## 2141 1845 1 0.699 0.008537 0.682 0.716
## 2145 1844 3 0.698 0.008548 0.681 0.715
## 2147 1841 1 0.698 0.008552 0.681 0.714
## 2158 1834 1 0.698 0.008556 0.680 0.714
## 2160 1833 1 0.697 0.008560 0.680 0.714
## 2161 1832 1 0.697 0.008563 0.680 0.713
## 2172 1826 2 0.696 0.008571 0.679 0.712
## 2176 1823 1 0.696 0.008575 0.678 0.712
## 2179 1820 1 0.695 0.008579 0.678 0.712
## 2189 1810 1 0.695 0.008582 0.678 0.711
## 2195 1808 2 0.694 0.008590 0.677 0.711
## 2196 1806 2 0.693 0.008598 0.676 0.710
## 2197 1804 1 0.693 0.008602 0.676 0.709
## 2200 1801 1 0.693 0.008605 0.675 0.709
## 2209 1794 2 0.692 0.008613 0.675 0.708
## 2214 1791 1 0.691 0.008617 0.674 0.708
## 2216 1790 1 0.691 0.008621 0.674 0.708
## 2217 1789 1 0.691 0.008625 0.673 0.707
## 2220 1785 1 0.690 0.008629 0.673 0.707
## 2224 1783 1 0.690 0.008632 0.673 0.706
## 2225 1782 1 0.689 0.008636 0.672 0.706
## 2236 1776 1 0.689 0.008640 0.672 0.706
## 2241 1773 1 0.689 0.008644 0.671 0.705
## 2244 1771 1 0.688 0.008648 0.671 0.705
## 2246 1770 1 0.688 0.008652 0.671 0.704
## 2248 1769 1 0.687 0.008656 0.670 0.704
## 2251 1768 1 0.687 0.008659 0.670 0.704
## 2255 1765 1 0.687 0.008663 0.669 0.703
## 2258 1764 1 0.686 0.008667 0.669 0.703
## 2264 1760 1 0.686 0.008671 0.669 0.703
## 2272 1757 1 0.686 0.008675 0.668 0.702
## 2274 1756 1 0.685 0.008679 0.668 0.702
## 2282 1754 1 0.685 0.008682 0.667 0.701
## 2292 1752 1 0.684 0.008686 0.667 0.701
## 2294 1751 2 0.684 0.008694 0.666 0.700
## 2295 1748 1 0.683 0.008698 0.666 0.700
## 2298 1746 2 0.682 0.008705 0.665 0.699
## 2303 1743 1 0.682 0.008709 0.665 0.699
## 2304 1742 1 0.682 0.008713 0.664 0.698
## 2311 1741 1 0.681 0.008717 0.664 0.698
## 2315 1739 1 0.681 0.008720 0.663 0.698
## 2319 1737 1 0.680 0.008724 0.663 0.697
## 2327 1730 1 0.680 0.008728 0.663 0.697
## 2334 1727 1 0.680 0.008732 0.662 0.696
## 2345 1721 3 0.678 0.008743 0.661 0.695
## 2351 1716 1 0.678 0.008747 0.661 0.695
## 2353 1714 1 0.678 0.008751 0.660 0.695
## 2354 1713 1 0.677 0.008755 0.660 0.694
## 2358 1711 1 0.677 0.008759 0.659 0.694
## 2361 1710 1 0.677 0.008763 0.659 0.693
## 2365 1709 1 0.676 0.008766 0.659 0.693
## 2366 1708 1 0.676 0.008770 0.658 0.693
## 2367 1707 1 0.675 0.008774 0.658 0.692
## 2371 1704 1 0.675 0.008778 0.657 0.692
## 2374 1701 1 0.675 0.008782 0.657 0.691
## 2382 1695 1 0.674 0.008785 0.657 0.691
## 2391 1691 1 0.674 0.008789 0.656 0.691
## 2397 1689 1 0.673 0.008793 0.656 0.690
## 2398 1687 2 0.673 0.008801 0.655 0.689
## 2405 1685 1 0.672 0.008805 0.655 0.689
## 2406 1684 1 0.672 0.008808 0.654 0.689
## 2412 1679 1 0.671 0.008812 0.654 0.688
## 2414 1677 1 0.671 0.008816 0.653 0.688
## 2416 1672 1 0.671 0.008820 0.653 0.687
## 2417 1669 1 0.670 0.008824 0.652 0.687
## 2426 1664 1 0.670 0.008828 0.652 0.687
## 2429 1663 1 0.669 0.008832 0.652 0.686
## 2431 1660 2 0.669 0.008839 0.651 0.686
## 2437 1658 1 0.668 0.008843 0.650 0.685
## 2439 1657 1 0.668 0.008847 0.650 0.685
## 2441 1656 1 0.667 0.008851 0.650 0.684
## 2445 1654 1 0.667 0.008855 0.649 0.684
## 2446 1653 1 0.666 0.008859 0.649 0.684
## 2448 1652 1 0.666 0.008862 0.648 0.683
## 2450 1651 1 0.666 0.008866 0.648 0.683
## 2457 1648 1 0.665 0.008870 0.648 0.682
## 2458 1646 1 0.665 0.008874 0.647 0.682
## 2459 1644 1 0.664 0.008878 0.647 0.682
## 2460 1642 1 0.664 0.008881 0.646 0.681
## 2468 1639 1 0.664 0.008885 0.646 0.681
## 2472 1637 1 0.663 0.008889 0.646 0.680
## 2475 1635 2 0.662 0.008897 0.645 0.680
## 2477 1632 1 0.662 0.008900 0.644 0.679
## 2488 1629 1 0.662 0.008904 0.644 0.679
## 2493 1626 1 0.661 0.008908 0.643 0.678
## 2495 1625 1 0.661 0.008912 0.643 0.678
## 2497 1624 1 0.660 0.008916 0.643 0.678
## 2503 1619 1 0.660 0.008920 0.642 0.677
## 2509 1614 1 0.660 0.008923 0.642 0.677
## 2511 1612 2 0.659 0.008931 0.641 0.676
## 2517 1608 1 0.658 0.008935 0.641 0.676
## 2521 1602 1 0.658 0.008939 0.640 0.675
## 2527 1598 1 0.658 0.008943 0.640 0.675
## 2539 1590 1 0.657 0.008947 0.639 0.674
## 2542 1587 1 0.657 0.008951 0.639 0.674
## 2544 1586 1 0.656 0.008954 0.638 0.674
## 2545 1585 1 0.656 0.008958 0.638 0.673
## 2546 1584 1 0.655 0.008962 0.638 0.673
## 2547 1582 1 0.655 0.008966 0.637 0.672
## 2554 1574 1 0.655 0.008970 0.637 0.672
## 2558 1571 1 0.654 0.008974 0.636 0.672
## 2562 1567 1 0.654 0.008978 0.636 0.671
## 2564 1566 1 0.653 0.008982 0.635 0.671
## 2566 1564 1 0.653 0.008986 0.635 0.670
## 2569 1562 2 0.652 0.008994 0.634 0.669
## 2571 1556 1 0.652 0.008998 0.634 0.669
## 2580 1545 1 0.651 0.009002 0.633 0.669
## 2584 1542 1 0.651 0.009006 0.633 0.668
## 2589 1536 1 0.650 0.009010 0.632 0.668
## 2590 1533 4 0.649 0.009027 0.631 0.666
## 2591 1529 1 0.648 0.009031 0.630 0.666
## 2592 1526 1 0.648 0.009035 0.630 0.665
## 2605 1518 1 0.647 0.009039 0.629 0.665
## 2617 1511 1 0.647 0.009043 0.629 0.664
## 2620 1510 1 0.647 0.009047 0.629 0.664
## 2621 1507 1 0.646 0.009051 0.628 0.664
## 2622 1504 1 0.646 0.009055 0.628 0.663
## 2624 1501 1 0.645 0.009060 0.627 0.663
## 2636 1495 1 0.645 0.009064 0.627 0.662
## 2640 1490 1 0.644 0.009068 0.626 0.662
## 2647 1482 1 0.644 0.009072 0.626 0.662
## 2652 1480 2 0.643 0.009081 0.625 0.661
## 2660 1476 1 0.643 0.009085 0.625 0.660
## 2669 1467 1 0.642 0.009090 0.624 0.660
## 2687 1455 1 0.642 0.009094 0.624 0.659
## 2690 1452 1 0.641 0.009099 0.623 0.659
## 2699 1444 1 0.641 0.009103 0.623 0.658
## 2703 1439 1 0.641 0.009108 0.622 0.658
## 2704 1437 1 0.640 0.009112 0.622 0.658
## 2706 1434 1 0.640 0.009117 0.621 0.657
## 2708 1433 1 0.639 0.009121 0.621 0.657
## 2709 1432 1 0.639 0.009126 0.621 0.656
## 2714 1426 1 0.638 0.009130 0.620 0.656
## 2719 1421 1 0.638 0.009135 0.620 0.655
## 2723 1417 1 0.637 0.009140 0.619 0.655
## 2740 1407 1 0.637 0.009144 0.619 0.655
## 2744 1403 1 0.636 0.009149 0.618 0.654
## 2745 1401 1 0.636 0.009154 0.618 0.654
## 2753 1395 1 0.636 0.009159 0.617 0.653
## 2763 1385 1 0.635 0.009164 0.617 0.653
## 2774 1376 1 0.635 0.009169 0.616 0.652
## 2781 1375 1 0.634 0.009174 0.616 0.652
## 2788 1370 1 0.634 0.009178 0.615 0.651
## 2798 1364 1 0.633 0.009184 0.615 0.651
## 2815 1357 1 0.633 0.009189 0.614 0.651
## 2819 1352 1 0.632 0.009194 0.614 0.650
## 2830 1346 1 0.632 0.009199 0.614 0.650
## 2831 1345 1 0.631 0.009204 0.613 0.649
## 2837 1340 3 0.630 0.009220 0.612 0.648
## 2839 1336 1 0.630 0.009225 0.611 0.647
## 2842 1334 1 0.629 0.009230 0.611 0.647
## 2848 1328 1 0.629 0.009235 0.610 0.646
## 2855 1324 1 0.628 0.009240 0.610 0.646
## 2856 1322 1 0.628 0.009245 0.609 0.645
## 2858 1321 1 0.627 0.009251 0.609 0.645
## 2861 1317 1 0.627 0.009256 0.608 0.644
## 2866 1313 1 0.626 0.009261 0.608 0.644
## 2872 1309 1 0.626 0.009266 0.607 0.644
## 2878 1304 1 0.625 0.009272 0.607 0.643
## 2880 1302 1 0.625 0.009277 0.606 0.643
## 2881 1300 1 0.624 0.009282 0.606 0.642
## 2888 1291 1 0.624 0.009288 0.605 0.642
## 2890 1289 1 0.623 0.009293 0.605 0.641
## 2892 1285 1 0.623 0.009299 0.604 0.641
## 2899 1281 1 0.622 0.009304 0.604 0.640
## 2904 1278 1 0.622 0.009309 0.603 0.640
## 2905 1276 1 0.621 0.009315 0.603 0.639
## 2915 1262 1 0.621 0.009321 0.602 0.639
## 2917 1259 1 0.620 0.009326 0.602 0.638
## 2922 1251 1 0.620 0.009332 0.601 0.638
## 2923 1250 1 0.619 0.009338 0.601 0.637
## 2925 1248 1 0.619 0.009343 0.600 0.637
## 2927 1245 1 0.618 0.009349 0.600 0.636
## 2934 1240 2 0.617 0.009360 0.599 0.635
## 2935 1237 1 0.617 0.009366 0.598 0.635
## 2939 1234 1 0.616 0.009372 0.598 0.634
## 2948 1227 1 0.616 0.009378 0.597 0.634
## 2956 1220 1 0.615 0.009384 0.597 0.633
## 2958 1217 2 0.614 0.009395 0.596 0.632
## 2959 1214 1 0.614 0.009401 0.595 0.632
## 2960 1213 1 0.613 0.009407 0.595 0.631
## 2964 1209 1 0.613 0.009413 0.594 0.631
## 2966 1207 1 0.612 0.009419 0.594 0.631
## 2969 1202 1 0.612 0.009425 0.593 0.630
## 2978 1193 1 0.611 0.009431 0.593 0.630
## 2980 1191 1 0.611 0.009437 0.592 0.629
## 2995 1178 1 0.610 0.009443 0.591 0.629
## 3002 1171 1 0.610 0.009449 0.591 0.628
## 3019 1162 1 0.609 0.009456 0.590 0.627
## 3028 1158 1 0.609 0.009462 0.590 0.627
## 3029 1157 1 0.608 0.009469 0.589 0.626
## 3033 1155 1 0.608 0.009475 0.589 0.626
## 3048 1146 1 0.607 0.009482 0.588 0.625
## 3050 1144 1 0.607 0.009488 0.588 0.625
## 3055 1138 2 0.606 0.009502 0.587 0.624
## 3064 1121 1 0.605 0.009508 0.586 0.623
## 3075 1115 1 0.604 0.009515 0.586 0.623
## 3077 1112 1 0.604 0.009522 0.585 0.622
## 3084 1111 1 0.603 0.009529 0.584 0.622
## 3112 1090 1 0.603 0.009537 0.584 0.621
## 3124 1087 1 0.602 0.009544 0.583 0.621
## 3131 1081 1 0.602 0.009551 0.583 0.620
## 3134 1080 1 0.601 0.009559 0.582 0.620
## 3148 1069 1 0.601 0.009566 0.582 0.619
## 3160 1065 1 0.600 0.009574 0.581 0.618
## 3161 1064 1 0.599 0.009582 0.580 0.618
## 3164 1062 1 0.599 0.009589 0.580 0.617
## 3175 1054 1 0.598 0.009597 0.579 0.617
## 3197 1042 1 0.598 0.009605 0.579 0.616
## 3208 1033 1 0.597 0.009613 0.578 0.616
## 3230 1010 1 0.597 0.009622 0.577 0.615
## 3231 1009 1 0.596 0.009630 0.577 0.615
## 3237 1005 1 0.595 0.009639 0.576 0.614
## 3238 1001 1 0.595 0.009648 0.576 0.613
## 3240 999 1 0.594 0.009656 0.575 0.613
## 3243 995 1 0.594 0.009665 0.574 0.612
## 3252 981 1 0.593 0.009674 0.574 0.612
## 3257 979 1 0.592 0.009683 0.573 0.611
## 3259 978 1 0.592 0.009692 0.573 0.611
## 3266 973 1 0.591 0.009701 0.572 0.610
## 3274 965 1 0.591 0.009710 0.571 0.609
## 3276 959 1 0.590 0.009720 0.571 0.609
## 3285 954 1 0.589 0.009729 0.570 0.608
## 3289 952 1 0.589 0.009739 0.569 0.608
## 3293 945 1 0.588 0.009748 0.569 0.607
## 3311 928 2 0.587 0.009768 0.567 0.606
## 3315 925 1 0.586 0.009779 0.567 0.605
## 3316 924 1 0.586 0.009788 0.566 0.604
## 3327 910 1 0.585 0.009799 0.565 0.604
## 3344 901 1 0.584 0.009809 0.565 0.603
## 3348 899 1 0.584 0.009820 0.564 0.603
## 3353 894 1 0.583 0.009831 0.563 0.602
## 3359 889 1 0.582 0.009842 0.563 0.601
## 3362 885 2 0.581 0.009863 0.561 0.600
## 3368 879 1 0.580 0.009874 0.561 0.599
## 3371 877 1 0.580 0.009885 0.560 0.599
## 3372 874 1 0.579 0.009896 0.559 0.598
## 3386 865 1 0.578 0.009907 0.559 0.597
## 3397 855 1 0.578 0.009919 0.558 0.597
## 3405 846 1 0.577 0.009930 0.557 0.596
## 3425 834 1 0.576 0.009942 0.557 0.595
## 3429 832 1 0.576 0.009955 0.556 0.595
## 3471 804 1 0.575 0.009968 0.555 0.594
## 3472 802 1 0.574 0.009981 0.554 0.593
## 3488 791 1 0.573 0.009995 0.554 0.593
## 3495 786 1 0.573 0.010009 0.553 0.592
## 3497 784 1 0.572 0.010023 0.552 0.591
## 3499 782 1 0.571 0.010037 0.551 0.591
## 3505 777 1 0.570 0.010051 0.551 0.590
## 3516 770 1 0.570 0.010065 0.550 0.589
## 3522 769 1 0.569 0.010079 0.549 0.589
## 3523 768 1 0.568 0.010093 0.548 0.588
## 3533 766 1 0.568 0.010107 0.547 0.587
## 3539 761 2 0.566 0.010135 0.546 0.586
## 3540 759 1 0.565 0.010149 0.545 0.585
## 3541 757 1 0.565 0.010163 0.544 0.584
## 3542 754 1 0.564 0.010177 0.544 0.583
## 3553 749 1 0.563 0.010192 0.543 0.583
## 3557 745 1 0.562 0.010206 0.542 0.582
## 3560 742 1 0.562 0.010220 0.541 0.581
## 3561 740 1 0.561 0.010235 0.540 0.581
## 3565 738 1 0.560 0.010249 0.540 0.580
## 3586 725 1 0.559 0.010264 0.539 0.579
## 3587 724 1 0.558 0.010279 0.538 0.578
## 3591 721 1 0.558 0.010294 0.537 0.578
## 3596 718 1 0.557 0.010309 0.536 0.577
## 3600 715 1 0.556 0.010323 0.536 0.576
## 3608 708 1 0.555 0.010339 0.535 0.575
## 3632 700 1 0.555 0.010354 0.534 0.575
## 3634 698 1 0.554 0.010370 0.533 0.574
## 3648 688 1 0.553 0.010386 0.532 0.573
## 3650 687 1 0.552 0.010402 0.532 0.572
## 3657 679 1 0.551 0.010419 0.531 0.572
## 3660 678 1 0.551 0.010435 0.530 0.571
## 3669 667 1 0.550 0.010452 0.529 0.570
## 3673 663 1 0.549 0.010469 0.528 0.569
## 3688 654 1 0.548 0.010486 0.527 0.568
## 3690 651 2 0.546 0.010522 0.525 0.567
## 3694 648 1 0.546 0.010539 0.525 0.566
## 3700 646 1 0.545 0.010557 0.524 0.565
## 3705 638 1 0.544 0.010574 0.523 0.564
## 3714 629 1 0.543 0.010593 0.522 0.563
## 3733 613 1 0.542 0.010613 0.521 0.563
## 3736 610 1 0.541 0.010632 0.520 0.562
## 3759 589 1 0.540 0.010654 0.519 0.561
## 3765 584 1 0.539 0.010676 0.518 0.560
## 3766 583 1 0.538 0.010697 0.517 0.559
## 3775 574 2 0.537 0.010742 0.515 0.557
## 3792 559 1 0.536 0.010766 0.514 0.556
## 3795 557 1 0.535 0.010789 0.513 0.555
## 3802 554 1 0.534 0.010813 0.512 0.555
## 3803 552 1 0.533 0.010836 0.511 0.554
## 3813 543 2 0.531 0.010885 0.509 0.552
## 3829 532 1 0.530 0.010910 0.508 0.551
## 3842 521 1 0.529 0.010936 0.507 0.550
## 3843 519 1 0.528 0.010963 0.506 0.549
## 3849 516 1 0.527 0.010989 0.505 0.548
## 3855 513 1 0.526 0.011015 0.504 0.547
## 3862 510 1 0.525 0.011042 0.503 0.546
## 3869 502 1 0.524 0.011069 0.502 0.545
## 3877 497 1 0.523 0.011097 0.501 0.544
## 3885 494 1 0.521 0.011125 0.499 0.543
## 3888 492 1 0.520 0.011153 0.498 0.542
## 3898 484 1 0.519 0.011181 0.497 0.541
## 3902 479 1 0.518 0.011210 0.496 0.540
## 3906 476 1 0.517 0.011240 0.495 0.539
## 3910 470 1 0.516 0.011269 0.494 0.538
## 3932 453 1 0.515 0.011302 0.493 0.537
## 3954 441 1 0.514 0.011336 0.491 0.536
## 3977 431 1 0.513 0.011373 0.490 0.535
## 3980 428 1 0.511 0.011409 0.489 0.533
## 3981 427 1 0.510 0.011445 0.487 0.532
## 3988 422 1 0.509 0.011481 0.486 0.531
## 3989 421 1 0.508 0.011518 0.485 0.530
## 3990 420 1 0.507 0.011553 0.484 0.529
## 4005 410 1 0.505 0.011591 0.482 0.528
## 4015 405 1 0.504 0.011630 0.481 0.527
## 4025 401 1 0.503 0.011668 0.480 0.525
## 4032 391 2 0.500 0.011749 0.477 0.523
## 4033 389 1 0.499 0.011789 0.476 0.522
## 4045 385 1 0.498 0.011830 0.474 0.521
## 4050 381 1 0.496 0.011871 0.473 0.519
## 4051 380 1 0.495 0.011911 0.471 0.518
## 4059 375 1 0.494 0.011952 0.470 0.517
## 4061 373 1 0.492 0.011993 0.469 0.516
## 4065 371 1 0.491 0.012034 0.467 0.514
## 4073 365 1 0.490 0.012076 0.466 0.513
## 4110 349 1 0.488 0.012123 0.464 0.512
## 4118 346 1 0.487 0.012169 0.463 0.510
## 4125 343 1 0.485 0.012217 0.461 0.509
## 4138 340 1 0.484 0.012264 0.460 0.508
## 4159 323 1 0.483 0.012317 0.458 0.506
## 4160 321 1 0.481 0.012370 0.457 0.505
## 4194 312 1 0.479 0.012426 0.455 0.504
## 4231 300 1 0.478 0.012487 0.453 0.502
## 4233 298 1 0.476 0.012548 0.451 0.501
## 4239 297 1 0.475 0.012607 0.450 0.499
## 4309 275 1 0.473 0.012679 0.448 0.498
## 4339 268 1 0.471 0.012754 0.446 0.496
## 4433 240 1 0.469 0.012851 0.444 0.494
## 4508 206 1 0.467 0.012989 0.441 0.492
## 4543 199 1 0.465 0.013134 0.439 0.490
## 4573 192 1 0.462 0.013287 0.436 0.488
## 4590 188 1 0.460 0.013442 0.433 0.486
## 4614 182 1 0.457 0.013603 0.430 0.484
## 4634 173 1 0.455 0.013779 0.427 0.481
## 4654 168 1 0.452 0.013960 0.424 0.479
## 4673 163 1 0.449 0.014147 0.421 0.477
## 4690 160 1 0.446 0.014334 0.418 0.474
## 4751 148 1 0.443 0.014551 0.415 0.472
## 4782 140 1 0.440 0.014787 0.411 0.469
## 4834 130 1 0.437 0.015056 0.407 0.466
## 4839 129 1 0.433 0.015315 0.403 0.463
## 4880 124 1 0.430 0.015585 0.399 0.460
## 4895 122 1 0.426 0.015851 0.395 0.457
## 4907 120 1 0.423 0.016112 0.391 0.454
## 4972 106 1 0.419 0.016446 0.386 0.451
## 4983 103 1 0.415 0.016782 0.382 0.447
## 5043 96 1 0.410 0.017154 0.377 0.444
## 5186 74 1 0.405 0.017796 0.370 0.440
## 5266 60 1 0.398 0.018735 0.361 0.435
## 5291 57 1 0.391 0.019665 0.353 0.429
## 5653 32 1 0.379 0.022530 0.335 0.423
## 5675 31 1 0.367 0.024899 0.318 0.415
## 5762 27 1 0.353 0.027431 0.300 0.407
## 5830 23 1 0.338 0.030230 0.279 0.397
## 5845 22 1 0.322 0.032521 0.260 0.387
## 6051 13 1 0.298 0.038325 0.225 0.374
## 6233 9 1 0.265 0.046177 0.179 0.358
# Overall Kaplan-Meier curve for time to death (`km` is fit earlier in the file).
# Use TRUE/FALSE rather than T/F: T and F are ordinary variables that can be
# reassigned, which would silently change plot behavior.
plot(km, conf.int = FALSE, mark.time = FALSE,
     xlab = "time (days)",
     ylab = "probability",
     main = "Kaplan-Meier estimates of time to death")
survdiff(Surv(dtime, death)~meno,data=rotterdam)
## Call:
## survdiff(formula = Surv(dtime, death) ~ meno, data = rotterdam)
##
## N Observed Expected (O-E)^2/E (O-E)^2/V
## meno=0 1312 468 600 29.0 55.1
## meno=1 1670 804 672 25.9 55.1
##
## Chisq= 55.1 on 1 degrees of freedom, p= 1e-13
# Fit a multiple Cox proportional hazards model for time to death on the
# training split, using all nine candidate covariates.  (The previous comment
# mentioned "edema", which is a pbc-dataset variable not present in rotterdam.)
# x/y matrices are kept on the fit so PAmeasures and pec can reuse them.
fit.coxph <- coxph(Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr + er + hormon + chemo,
data = rotterdam_train, x=TRUE,y=TRUE)
summary(fit.coxph)
## Call:
## coxph(formula = Surv(dtime, death) ~ age + meno + size + grade +
## nodes + pgr + er + hormon + chemo, data = rotterdam_train,
## x = TRUE, y = TRUE)
##
## n= 2535, number of events= 1085
##
## coef exp(coef) se(coef) z Pr(>|z|)
## age 0.0148407 1.0149514 0.0041637 3.564 0.000365 ***
## meno 0.1216006 1.1293029 0.1089300 1.116 0.264286
## size20-50 0.4511986 1.5701931 0.0708835 6.365 1.95e-10 ***
## size>50 0.8355036 2.3059750 0.0986485 8.470 < 2e-16 ***
## grade 0.3304171 1.3915484 0.0769025 4.297 1.73e-05 ***
## nodes 0.0752537 1.0781576 0.0054128 13.903 < 2e-16 ***
## pgr -0.0003649 0.9996352 0.0001333 -2.737 0.006206 **
## er -0.0001062 0.9998938 0.0001191 -0.892 0.372584
## hormon -0.1533601 0.8578208 0.0957077 -1.602 0.109072
## chemo 0.0299528 1.0304059 0.0879488 0.341 0.733427
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## exp(coef) exp(-coef) lower .95 upper .95
## age 1.0150 0.9853 1.0067 1.0233
## meno 1.1293 0.8855 0.9122 1.3981
## size20-50 1.5702 0.6369 1.3665 1.8042
## size>50 2.3060 0.4337 1.9006 2.7978
## grade 1.3915 0.7186 1.1968 1.6179
## nodes 1.0782 0.9275 1.0668 1.0897
## pgr 0.9996 1.0004 0.9994 0.9999
## er 0.9999 1.0001 0.9997 1.0001
## hormon 0.8578 1.1657 0.7111 1.0348
## chemo 1.0304 0.9705 0.8673 1.2243
##
## Concordance= 0.695 (se = 0.008 )
## Likelihood ratio test= 470.4 on 10 df, p=<2e-16
## Wald test = 541.7 on 10 df, p=<2e-16
## Score (logrank) test = 613.3 on 10 df, p=<2e-16
# In-sample R.squared and L.squared (PAmeasures) of the Cox PH model
pam.coxph(fit.coxph)
## $R.squared
## [1] "0.1987"
##
## $L.squared
## [1] "0.7150"
# Weighted concordance; timewt = "n/G2" requests the IPCW-style weighting
# (see ?survival::concordance) rather than the default Harrell weighting.
concordance(fit.coxph, timewt = "n/G2")$concordance
## [1] 0.6611864
# Exponential AFT model on the full covariate set; x/y matrices are retained
# so the PAmeasures and concordance calls below can reuse them.
fit.exp <- survreg(
  Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr + er +
    hormon + chemo,
  dist = "exponential",
  data = rotterdam_train,
  x = TRUE,
  y = TRUE
)
summary(fit.exp)
##
## Call:
## survreg(formula = Surv(dtime, death) ~ age + meno + size + grade +
## nodes + pgr + er + hormon + chemo, data = rotterdam_train,
## dist = "exponential", x = TRUE, y = TRUE)
## Value Std. Error z p
## (Intercept) 1.08e+01 2.94e-01 36.88 < 2e-16
## age -1.38e-02 4.12e-03 -3.36 0.00078
## meno -1.19e-01 1.09e-01 -1.10 0.27138
## size20-50 -4.44e-01 7.08e-02 -6.27 3.7e-10
## size>50 -7.87e-01 9.86e-02 -7.98 1.5e-15
## grade -3.12e-01 7.69e-02 -4.06 4.9e-05
## nodes -7.08e-02 5.44e-03 -13.02 < 2e-16
## pgr 3.42e-04 1.33e-04 2.58 0.00988
## er 8.62e-05 1.19e-04 0.73 0.46743
## hormon 1.74e-01 9.54e-02 1.82 0.06862
## chemo -3.75e-02 8.80e-02 -0.43 0.66960
##
## Scale fixed at 1
##
## Exponential distribution
## Loglik(model)= -10317.3 Loglik(intercept only)= -10533.6
## Chisq= 432.64 on 10 degrees of freedom, p= 1e-86
## Number of Newton-Raphson Iterations: 5
## n= 2535
# In-sample R.squared and L.squared (PAmeasures) of the exponential model.
# When the model contains factors (size), the data must be passed directly
# via x.mat as shown below.
pam.survreg(fit.exp, x.mat = rotterdam_train)
## $R.squared
## [1] "0.2114"
##
## $L.squared
## [1] "0.0879"
# Weighted concordance of the exponential fit (same n/G2 weighting as above)
concordance(fit.exp, timewt = "n/G2")$concordance
## [1] 0.661674
# Weibull AFT model on the same covariate set; generalizes the exponential
# fit by additionally estimating a scale parameter.
fit.wei <- survreg(
  Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr + er +
    hormon + chemo,
  dist = "weibull",
  data = rotterdam_train,
  x = TRUE,
  y = TRUE
)
summary(fit.wei)
##
## Call:
## survreg(formula = Surv(dtime, death) ~ age + meno + size + grade +
## nodes + pgr + er + hormon + chemo, data = rotterdam_train,
## dist = "weibull", x = TRUE, y = TRUE)
## Value Std. Error z p
## (Intercept) 1.02e+01 2.23e-01 45.77 < 2e-16
## age -1.15e-02 3.08e-03 -3.73 0.00019
## meno -8.05e-02 8.06e-02 -1.00 0.31784
## size20-50 -3.37e-01 5.31e-02 -6.34 2.3e-10
## size>50 -6.34e-01 7.37e-02 -8.61 < 2e-16
## grade -2.48e-01 5.71e-02 -4.34 1.4e-05
## nodes -5.66e-02 4.12e-03 -13.73 < 2e-16
## pgr 2.67e-04 9.87e-05 2.71 0.00682
## er 8.42e-05 8.82e-05 0.96 0.33939
## hormon 8.20e-02 7.09e-02 1.16 0.24738
## chemo -2.15e-02 6.50e-02 -0.33 0.74105
## Log(scale) -3.00e-01 2.61e-02 -11.52 < 2e-16
##
## Scale= 0.741
##
## Weibull distribution
## Loglik(model)= -10259 Loglik(intercept only)= -10503.6
## Chisq= 489.27 on 10 degrees of freedom, p= 8.7e-99
## Number of Newton-Raphson Iterations: 5
## n= 2535
# In-sample R.squared and L.squared (PAmeasures) of the Weibull model.
# (Previous comment said "exponential model" — copy/paste error; this is fit.wei.)
# When the model contains factors (size), the data must be passed directly
# via x.mat as shown below.
pam.survreg(fit.wei, x.mat = rotterdam_train)
## $R.squared
## [1] "0.2138"
##
## $L.squared
## [1] "0.2789"
# Weighted concordance of the Weibull fit
concordance(fit.wei, timewt = "n/G2")$concordance
## [1] 0.6611702
# Log-normal AFT model on the same covariate set as the other parametric fits.
fit.lognormal <- survreg(
  Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr + er +
    hormon + chemo,
  dist = "lognormal",
  data = rotterdam_train,
  x = TRUE,
  y = TRUE
)
summary(fit.lognormal)
##
## Call:
## survreg(formula = Surv(dtime, death) ~ age + meno + size + grade +
## nodes + pgr + er + hormon + chemo, data = rotterdam_train,
## dist = "lognormal", x = TRUE, y = TRUE)
## Value Std. Error z p
## (Intercept) 9.95e+00 2.28e-01 43.58 < 2e-16
## age -9.70e-03 3.26e-03 -2.98 0.00289
## meno -1.26e-01 8.81e-02 -1.42 0.15438
## size20-50 -3.53e-01 5.51e-02 -6.40 1.6e-10
## size>50 -6.16e-01 8.66e-02 -7.11 1.2e-12
## grade -2.70e-01 5.97e-02 -4.53 6.0e-06
## nodes -7.67e-02 5.94e-03 -12.91 < 2e-16
## pgr 3.65e-04 1.00e-04 3.64 0.00028
## er 1.81e-04 9.87e-05 1.83 0.06721
## hormon 2.28e-01 8.10e-02 2.81 0.00491
## chemo 9.99e-04 7.12e-02 0.01 0.98880
## Log(scale) 5.87e-02 2.34e-02 2.51 0.01220
##
## Scale= 1.06
##
## Log Normal distribution
## Loglik(model)= -10222.4 Loglik(intercept only)= -10468.8
## Chisq= 492.82 on 10 degrees of freedom, p= 1.5e-99
## Number of Newton-Raphson Iterations: 4
## n= 2535
# In-sample R.squared and L.squared (PAmeasures) of the lognormal model;
# x.mat is passed directly because the model contains the factor `size`.
pam.survreg(fit.lognormal, x.mat = rotterdam_train)
## $R.squared
## [1] "0.2099"
##
## $L.squared
## [1] "0.0812"
# Weighted concordance of the lognormal fit
concordance(fit.lognormal, timewt = "n/G2")$concordance
## [1] 0.6618683
We first consider variable selection under the Cox proportional hazards model.
Stepwise forward selection via AIC:
# Scope formulas for stepwise selection: the full candidate covariate set
# and the intercept-only null model.
full_model_formula <- Surv(dtime, death) ~ age + meno + size + grade +
  nodes + pgr + er + hormon + chemo
null_model_formula <- Surv(dtime, death) ~ 1
# Forward selection by AIC (k = 2): start from the null Cox model and let
# any term in the full formula enter.
rot_sfAIC <- stepAIC(
  coxph(null_model_formula, data = rotterdam_train, x = TRUE, y = TRUE),
  scope = full_model_formula,
  direction = "forward",
  k = 2
)
## Start: AIC=15919.81
## Surv(dtime, death) ~ 1
##
## Df AIC
## + nodes 1 15637
## + size 2 15704
## + age 1 15850
## + meno 1 15860
## + grade 1 15869
## + pgr 1 15898
## + hormon 1 15908
## <none> 15920
## + er 1 15922
## + chemo 1 15922
##
## Step: AIC=15636.66
## Surv(dtime, death) ~ nodes
##
## Df AIC
## + size 2 15541
## + age 1 15581
## + meno 1 15596
## + grade 1 15602
## + pgr 1 15621
## + chemo 1 15631
## <none> 15637
## + hormon 1 15638
## + er 1 15638
##
## Step: AIC=15540.9
## Surv(dtime, death) ~ nodes + size
##
## Df AIC
## + age 1 15497
## + meno 1 15502
## + grade 1 15519
## + pgr 1 15529
## + chemo 1 15534
## <none> 15541
## + er 1 15543
## + hormon 1 15543
##
## Step: AIC=15496.86
## Surv(dtime, death) ~ nodes + size + age
##
## Df AIC
## + grade 1 15475
## + pgr 1 15483
## + er 1 15496
## + meno 1 15496
## <none> 15497
## + hormon 1 15498
## + chemo 1 15499
##
## Step: AIC=15474.58
## Surv(dtime, death) ~ nodes + size + age + grade
##
## Df AIC
## + pgr 1 15466
## + er 1 15474
## <none> 15475
## + hormon 1 15475
## + meno 1 15475
## + chemo 1 15477
##
## Step: AIC=15465.76
## Surv(dtime, death) ~ nodes + size + age + grade + pgr
##
## Df AIC
## + hormon 1 15465
## <none> 15466
## + meno 1 15467
## + er 1 15467
## + chemo 1 15468
##
## Step: AIC=15465.36
## Surv(dtime, death) ~ nodes + size + age + grade + pgr + hormon
##
## Df AIC
## <none> 15465
## + meno 1 15466
## + er 1 15467
## + chemo 1 15467
summary(rot_sfAIC)
## Call:
## coxph(formula = Surv(dtime, death) ~ nodes + size + age + grade +
## pgr + hormon, data = rotterdam_train, x = TRUE, y = TRUE)
##
## n= 2535, number of events= 1085
##
## coef exp(coef) se(coef) z Pr(>|z|)
## nodes 0.0762471 1.0792292 0.0052924 14.407 < 2e-16 ***
## size20-50 0.4472209 1.5639598 0.0707440 6.322 2.59e-10 ***
## size>50 0.8282099 2.2892172 0.0978057 8.468 < 2e-16 ***
## age 0.0174189 1.0175715 0.0024469 7.119 1.09e-12 ***
## grade 0.3338929 1.3963936 0.0767333 4.351 1.35e-05 ***
## pgr -0.0004071 0.9995929 0.0001287 -3.164 0.00155 **
## hormon -0.1450424 0.8649856 0.0952331 -1.523 0.12775
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## exp(coef) exp(-coef) lower .95 upper .95
## nodes 1.0792 0.9266 1.0681 1.0905
## size20-50 1.5640 0.6394 1.3615 1.7966
## size>50 2.2892 0.4368 1.8899 2.7729
## age 1.0176 0.9827 1.0127 1.0225
## grade 1.3964 0.7161 1.2014 1.6230
## pgr 0.9996 1.0004 0.9993 0.9998
## hormon 0.8650 1.1561 0.7177 1.0425
##
## Concordance= 0.694 (se = 0.008 )
## Likelihood ratio test= 468.4 on 7 df, p=<2e-16
## Wald test = 541 on 7 df, p=<2e-16
## Score (logrank) test = 608.1 on 7 df, p=<2e-16
# PAmeasures fit statistics for the forward-AIC model
pam.coxph(rot_sfAIC)
## $R.squared
## [1] "0.1998"
##
## $L.squared
## [1] "0.7135"
# Weighted concordance for the forward-AIC model
concordance(rot_sfAIC, timewt = "n/G2")$concordance
## [1] 0.6576749
Stepwise backward selection via AIC:
# Backward elimination by AIC (k = 2): start from the full Cox model and
# drop terms while AIC improves.
rot_sbAIC <- stepAIC(
  coxph(full_model_formula, data = rotterdam_train, x = TRUE, y = TRUE),
  scope = full_model_formula,
  direction = "backward",
  k = 2
)
## Start: AIC=15469.44
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon + chemo
##
## Df AIC
## - chemo 1 15468
## - er 1 15468
## - meno 1 15469
## <none> 15469
## - hormon 1 15470
## - pgr 1 15476
## - age 1 15480
## - grade 1 15487
## - size 2 15542
## - nodes 1 15618
##
## Step: AIC=15467.56
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon
##
## Df AIC
## - er 1 15466
## - meno 1 15467
## <none> 15468
## - hormon 1 15468
## - pgr 1 15474
## - age 1 15478
## - grade 1 15485
## - size 2 15541
## - nodes 1 15623
##
## Step: AIC=15466.4
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## hormon
##
## Df AIC
## - meno 1 15465
## <none> 15466
## - hormon 1 15467
## - pgr 1 15475
## - age 1 15477
## - grade 1 15484
## - size 2 15540
## - nodes 1 15622
##
## Step: AIC=15465.36
## Surv(dtime, death) ~ age + size + grade + nodes + pgr + hormon
##
## Df AIC
## <none> 15465
## - hormon 1 15466
## - pgr 1 15475
## - grade 1 15484
## - age 1 15514
## - size 2 15538
## - nodes 1 15623
summary(rot_sbAIC)
## Call:
## coxph(formula = Surv(dtime, death) ~ age + size + grade + nodes +
## pgr + hormon, data = rotterdam_train, x = TRUE, y = TRUE)
##
## n= 2535, number of events= 1085
##
## coef exp(coef) se(coef) z Pr(>|z|)
## age 0.0174189 1.0175715 0.0024469 7.119 1.09e-12 ***
## size20-50 0.4472209 1.5639598 0.0707440 6.322 2.59e-10 ***
## size>50 0.8282099 2.2892172 0.0978057 8.468 < 2e-16 ***
## grade 0.3338929 1.3963936 0.0767333 4.351 1.35e-05 ***
## nodes 0.0762471 1.0792292 0.0052924 14.407 < 2e-16 ***
## pgr -0.0004071 0.9995929 0.0001287 -3.164 0.00155 **
## hormon -0.1450424 0.8649856 0.0952331 -1.523 0.12775
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## exp(coef) exp(-coef) lower .95 upper .95
## age 1.0176 0.9827 1.0127 1.0225
## size20-50 1.5640 0.6394 1.3615 1.7966
## size>50 2.2892 0.4368 1.8899 2.7729
## grade 1.3964 0.7161 1.2014 1.6230
## nodes 1.0792 0.9266 1.0681 1.0905
## pgr 0.9996 1.0004 0.9993 0.9998
## hormon 0.8650 1.1561 0.7177 1.0425
##
## Concordance= 0.694 (se = 0.008 )
## Likelihood ratio test= 468.4 on 7 df, p=<2e-16
## Wald test = 541 on 7 df, p=<2e-16
## Score (logrank) test = 608.1 on 7 df, p=<2e-16
# PAmeasures fit statistics for the backward-AIC model (identical to the
# forward-AIC model, as both procedures selected the same covariates)
pam.coxph(rot_sbAIC)
## $R.squared
## [1] "0.1998"
##
## $L.squared
## [1] "0.7135"
# Weighted concordance for the backward-AIC model
concordance(rot_sbAIC, timewt = "n/G2")$concordance
## [1] 0.6576749
Forward and backward selection via AIC result in the same model.
Stepwise forward selection via BIC:
# Forward selection with the BIC penalty (k = log of sample size).
# FIX: the original used k = log(n), where n = nrow(rotterdam) is the size
# of the FULL data set (2982); the model is fit on rotterdam_train, so the
# BIC penalty must use the training-set size instead.
# NOTE(review): the captured output below was generated with the old
# penalty; rerun the chunk to refresh it.
rot_sfBIC <- stepAIC(
  coxph(null_model_formula, data = rotterdam_train, x = TRUE, y = TRUE),
  scope = full_model_formula,
  direction = "forward",
  k = log(nrow(rotterdam_train))
)
## Start: AIC=15919.81
## Surv(dtime, death) ~ 1
##
## Df AIC
## + nodes 1 15643
## + size 2 15716
## + age 1 15856
## + meno 1 15866
## + grade 1 15876
## + pgr 1 15904
## + hormon 1 15914
## <none> 15920
## + er 1 15928
## + chemo 1 15928
##
## Step: AIC=15642.66
## Surv(dtime, death) ~ nodes
##
## Df AIC
## + size 2 15559
## + age 1 15593
## + meno 1 15608
## + grade 1 15614
## + pgr 1 15633
## <none> 15643
## + chemo 1 15643
## + hormon 1 15650
## + er 1 15650
##
## Step: AIC=15558.9
## Surv(dtime, death) ~ nodes + size
##
## Df AIC
## + age 1 15521
## + meno 1 15526
## + grade 1 15543
## + pgr 1 15553
## + chemo 1 15558
## <none> 15559
## + er 1 15567
## + hormon 1 15567
##
## Step: AIC=15520.86
## Surv(dtime, death) ~ nodes + size + age
##
## Df AIC
## + grade 1 15505
## + pgr 1 15513
## <none> 15521
## + er 1 15526
## + meno 1 15526
## + hormon 1 15528
## + chemo 1 15529
##
## Step: AIC=15504.58
## Surv(dtime, death) ~ nodes + size + age + grade
##
## Df AIC
## + pgr 1 15502
## <none> 15505
## + er 1 15510
## + hormon 1 15511
## + meno 1 15511
## + chemo 1 15513
##
## Step: AIC=15501.76
## Surv(dtime, death) ~ nodes + size + age + grade + pgr
##
## Df AIC
## <none> 15502
## + hormon 1 15507
## + meno 1 15509
## + er 1 15509
## + chemo 1 15510
summary(rot_sfBIC)
## Call:
## coxph(formula = Surv(dtime, death) ~ nodes + size + age + grade +
## pgr, data = rotterdam_train, x = TRUE, y = TRUE)
##
## n= 2535, number of events= 1085
##
## coef exp(coef) se(coef) z Pr(>|z|)
## nodes 0.0752090 1.0781095 0.0052924 14.211 < 2e-16 ***
## size20-50 0.4429197 1.5572473 0.0706760 6.267 3.68e-10 ***
## size>50 0.8247409 2.2812896 0.0977465 8.438 < 2e-16 ***
## age 0.0167237 1.0168644 0.0024124 6.932 4.14e-12 ***
## grade 0.3284614 1.3888297 0.0766679 4.284 1.83e-05 ***
## pgr -0.0003985 0.9996016 0.0001285 -3.102 0.00192 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## exp(coef) exp(-coef) lower .95 upper .95
## nodes 1.0781 0.9275 1.0670 1.0894
## size20-50 1.5572 0.6422 1.3558 1.7886
## size>50 2.2813 0.4383 1.8836 2.7630
## age 1.0169 0.9834 1.0121 1.0217
## grade 1.3888 0.7200 1.1951 1.6140
## pgr 0.9996 1.0004 0.9993 0.9999
##
## Concordance= 0.693 (se = 0.008 )
## Likelihood ratio test= 466 on 6 df, p=<2e-16
## Wald test = 534.8 on 6 df, p=<2e-16
## Score (logrank) test = 600.3 on 6 df, p=<2e-16
# PAmeasures fit statistics for the forward-BIC model
pam.coxph(rot_sfBIC)
## $R.squared
## [1] "0.2048"
##
## $L.squared
## [1] "0.7115"
# Weighted concordance for the forward-BIC model
concordance(rot_sfBIC, timewt = "n/G2")$concordance
## [1] 0.6552926
Stepwise backward selection via BIC:
# Backward elimination with the BIC penalty.
# FIX: as with the forward-BIC step, the original used k = log(n) with
# n = nrow(rotterdam) (full data); the fit uses rotterdam_train, so the
# penalty must be based on the training-set size.
# NOTE(review): the captured output below was generated with the old
# penalty; rerun the chunk to refresh it.
rot_sbBIC <- stepAIC(
  coxph(full_model_formula, data = rotterdam_train, x = TRUE, y = TRUE),
  scope = full_model_formula,
  direction = "backward",
  k = log(nrow(rotterdam_train))
)
## Start: AIC=15529.44
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon + chemo
##
## Df AIC
## - chemo 1 15522
## - er 1 15522
## - meno 1 15523
## - hormon 1 15524
## <none> 15529
## - pgr 1 15530
## - age 1 15534
## - grade 1 15541
## - size 2 15590
## - nodes 1 15672
##
## Step: AIC=15521.56
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon
##
## Df AIC
## - er 1 15514
## - meno 1 15515
## - hormon 1 15516
## <none> 15522
## - pgr 1 15522
## - age 1 15526
## - grade 1 15533
## - size 2 15583
## - nodes 1 15671
##
## Step: AIC=15514.4
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## hormon
##
## Df AIC
## - meno 1 15507
## - hormon 1 15509
## <none> 15514
## - pgr 1 15517
## - age 1 15519
## - grade 1 15526
## - size 2 15576
## - nodes 1 15664
##
## Step: AIC=15507.37
## Surv(dtime, death) ~ age + size + grade + nodes + pgr + hormon
##
## Df AIC
## - hormon 1 15502
## <none> 15507
## - pgr 1 15511
## - grade 1 15520
## - age 1 15550
## - size 2 15568
## - nodes 1 15659
##
## Step: AIC=15501.76
## Surv(dtime, death) ~ age + size + grade + nodes + pgr
##
## Df AIC
## <none> 15502
## - pgr 1 15505
## - grade 1 15513
## - age 1 15542
## - size 2 15562
## - nodes 1 15651
# Coefficient summary for the backward-BIC-selected Cox model. Note that
# backward selection lands on the same model as forward selection via BIC
# (identical concordance and R2/L2 values below).
summary(rot_sbBIC)
## Call:
## coxph(formula = Surv(dtime, death) ~ age + size + grade + nodes +
## pgr, data = rotterdam_train, x = TRUE, y = TRUE)
##
## n= 2535, number of events= 1085
##
## coef exp(coef) se(coef) z Pr(>|z|)
## age 0.0167237 1.0168644 0.0024124 6.932 4.14e-12 ***
## size20-50 0.4429197 1.5572473 0.0706760 6.267 3.68e-10 ***
## size>50 0.8247409 2.2812896 0.0977465 8.438 < 2e-16 ***
## grade 0.3284614 1.3888297 0.0766679 4.284 1.83e-05 ***
## nodes 0.0752090 1.0781095 0.0052924 14.211 < 2e-16 ***
## pgr -0.0003985 0.9996016 0.0001285 -3.102 0.00192 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## exp(coef) exp(-coef) lower .95 upper .95
## age 1.0169 0.9834 1.0121 1.0217
## size20-50 1.5572 0.6422 1.3558 1.7886
## size>50 2.2813 0.4383 1.8836 2.7630
## grade 1.3888 0.7200 1.1951 1.6140
## nodes 1.0781 0.9275 1.0670 1.0894
## pgr 0.9996 1.0004 0.9993 0.9999
##
## Concordance= 0.693 (se = 0.008 )
## Likelihood ratio test= 466 on 6 df, p=<2e-16
## Wald test = 534.8 on 6 df, p=<2e-16
## Score (logrank) test = 600.3 on 6 df, p=<2e-16
# R-squared / L-squared prediction accuracy measures for this model.
pam.coxph(rot_sbBIC)
## $R.squared
## [1] "0.2048"
##
## $L.squared
## [1] "0.7115"
# Uno's C-statistic on the training data (no newdata supplied).
concordance(rot_sbBIC, timewt = "n/G2")$concordance
## [1] 0.6552926
We fit a LASSO model (selecting penalty via 10-fold cross-validation) and attach the resulting coefficients to a coxph object so that we can call survival::concordance:
# Cox LASSO via ncvreg. NOTE(review): prefer moving this library() call to
# the top of the file with the other dependencies.
library(ncvreg)
# ncvsurv needs a numeric design matrix, so the three-level `size` factor is
# dummy-coded by hand (the remaining level acts as the reference).
X <- rotterdam_train[, c("age", "meno", "size", "grade", "nodes", "pgr",
"er", "hormon", "chemo")]
X$`size20-50` <- ifelse(X$size == "20-50", 1, 0)
X$`size>50` <- ifelse(X$size == ">50", 1, 0)
X$size <- NULL
y <- Surv(rotterdam_train$dtime, rotterdam_train$death)
# 10-fold cross-validation to choose the LASSO penalty; fold assignment is
# random, so results depend on the seed set at the top of the file.
rot_cvlasso <- cv.ncvsurv(X, y,
nfolds = 10, nlambda = 50, penalty = "lasso")
# Refit at the CV-selected lambda.
rot_lasso <- ncvsurv(X, y, penalty = "lasso", lambda = rot_cvlasso$lambda.min)
lasso_beta <- coef(rot_lasso)
# NOTE(review): coef() on an ncvsurv fit should already return a named
# vector; this rename assumes the coefficient order matches colnames(X) —
# verify rather than overwrite blindly.
names(lasso_beta) <- colnames(X)
# Graft the penalized coefficients onto a coxph object so that
# survival::concordance() and pec::calPlot() can be reused; match()
# reorders the LASSO estimates into coxph's coefficient ordering.
lasso_temp <- coxph(full_model_formula, data = rotterdam_train,
x = TRUE, y = TRUE)
lasso_beta <- lasso_beta[match(names(coef(lasso_temp)), names(lasso_beta))]
lasso_temp$coefficients <- lasso_beta
# Training-set Uno's C for the grafted LASSO model.
concordance(lasso_temp, timewt = "n/G2", newdata = rotterdam_train)
## Call:
## concordance.coxph(object = lasso_temp, newdata = rotterdam_train,
## timewt = "n/G2")
##
## n= 2535
## Concordance= 0.6618 se= 0.01667
## concordant discordant tied.x tied.y tied.xy
## 1948542.92 995739.42 18.28 222.53 0.00
# Weibull AFT model: stepwise backward elimination by AIC (k = 2).
# (The extra `data` argument to stepAIC is forwarded via `...`; kept for
# parity with the original call.)
weiAFT_sbAIC <- stepAIC(
  survreg(full_model_formula, data = rotterdam_train,
          dist = "weibull", x = TRUE, y = TRUE),
  full_model_formula,
  data = rotterdam_train,
  direction = "backward",
  k = 2
)
## Start: AIC=20542.01
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon + chemo
##
## Df AIC
## - chemo 1 20540
## - er 1 20541
## - meno 1 20541
## - hormon 1 20541
## <none> 20542
## - pgr 1 20548
## - age 1 20554
## - grade 1 20560
## - size 2 20618
## - nodes 1 20695
##
## Step: AIC=20540.11
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon
##
## Df AIC
## - meno 1 20539
## - er 1 20539
## - hormon 1 20540
## <none> 20540
## - pgr 1 20546
## - age 1 20552
## - grade 1 20558
## - size 2 20617
## - nodes 1 20700
##
## Step: AIC=20539.05
## Surv(dtime, death) ~ age + size + grade + nodes + pgr + er +
## hormon
##
## Df AIC
## - er 1 20538
## - hormon 1 20538
## <none> 20539
## - pgr 1 20546
## - grade 1 20558
## - age 1 20588
## - size 2 20615
## - nodes 1 20702
##
## Step: AIC=20537.82
## Surv(dtime, death) ~ age + size + grade + nodes + pgr + hormon
##
## Df AIC
## - hormon 1 20537
## <none> 20538
## - pgr 1 20547
## - grade 1 20556
## - age 1 20588
## - size 2 20614
## - nodes 1 20701
##
## Step: AIC=20537.03
## Surv(dtime, death) ~ age + size + grade + nodes + pgr
##
## Df AIC
## <none> 20537
## - pgr 1 20546
## - grade 1 20555
## - age 1 20586
## - size 2 20613
## - nodes 1 20699
# Exponential AFT model: stepwise backward elimination by AIC (k = 2).
# (The extra `data` argument to stepAIC is forwarded via `...`; kept for
# parity with the original call.)
exAFT_sbAIC <- stepAIC(
  survreg(full_model_formula, data = rotterdam_train,
          dist = "exponential", x = TRUE, y = TRUE),
  full_model_formula,
  data = rotterdam_train,
  direction = "backward",
  k = 2
)
## Start: AIC=20656.64
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon + chemo
##
## Df AIC
## - chemo 1 20655
## - er 1 20655
## - meno 1 20656
## <none> 20657
## - hormon 1 20658
## - pgr 1 20662
## - age 1 20666
## - grade 1 20672
## - size 2 20723
## - nodes 1 20788
##
## Step: AIC=20654.82
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon
##
## Df AIC
## - er 1 20653
## - meno 1 20654
## <none> 20655
## - hormon 1 20656
## - pgr 1 20660
## - age 1 20664
## - grade 1 20670
## - size 2 20722
## - nodes 1 20793
##
## Step: AIC=20653.39
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## hormon
##
## Df AIC
## - meno 1 20652
## <none> 20653
## - hormon 1 20655
## - pgr 1 20660
## - age 1 20662
## - grade 1 20669
## - size 2 20720
## - nodes 1 20792
##
## Step: AIC=20652.33
## Surv(dtime, death) ~ age + size + grade + nodes + pgr + hormon
##
## Df AIC
## <none> 20652
## - hormon 1 20654
## - pgr 1 20660
## - grade 1 20668
## - age 1 20696
## - size 2 20718
## - nodes 1 20793
# Lognormal AFT model: stepwise backward elimination by AIC (k = 2).
# (The extra `data` argument to stepAIC is forwarded via `...`; kept for
# parity with the original call.)
lognormAFT_sbAIC <- stepAIC(
  survreg(full_model_formula, data = rotterdam_train,
          dist = "lognormal", x = TRUE, y = TRUE),
  full_model_formula,
  data = rotterdam_train,
  direction = "backward",
  k = 2
)
## Start: AIC=20468.74
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon + chemo
##
## Df AIC
## - chemo 1 20467
## <none> 20469
## - meno 1 20469
## - er 1 20470
## - hormon 1 20475
## - age 1 20476
## - pgr 1 20480
## - grade 1 20488
## - size 2 20530
## - nodes 1 20633
##
## Step: AIC=20466.74
## Surv(dtime, death) ~ age + meno + size + grade + nodes + pgr +
## er + hormon
##
## Df AIC
## <none> 20467
## - meno 1 20467
## - er 1 20468
## - hormon 1 20473
## - age 1 20474
## - pgr 1 20478
## - grade 1 20486
## - size 2 20528
## - nodes 1 20637
# Flatten the (R-squared, L-squared) output of pam.survreg into a plain
# numeric pair for each AFT fit.
weiAFT_sbAIC_R2L2 <- pam.survreg(weiAFT_sbAIC, x.mat = rotterdam_train) %>%
  unlist() %>%
  as.numeric()
exAFT_sbAIC_R2L2 <- pam.survreg(exAFT_sbAIC, x.mat = rotterdam_train) %>%
  unlist() %>%
  as.numeric()
lognormAFT_sbAIC_R2L2 <- pam.survreg(lognormAFT_sbAIC, x.mat = rotterdam_train) %>%
  unlist() %>%
  as.numeric()
# Assemble the R-squared / L-squared matrix: the three AFT rows first,
# then one row per Cox-based model (forward AIC, forward BIC, LASSO).
# (R2/L2 is not computed for time-dependent models.)
R2L2_stats <- rbind(
  weiAFT_sbAIC_R2L2,
  exAFT_sbAIC_R2L2,
  lognormAFT_sbAIC_R2L2,
  do.call(rbind, lapply(list(rot_sfAIC, rot_sfBIC, lasso_temp),
                        function(mod) as.numeric(unlist(pam.coxph(mod)))))
)
colnames(R2L2_stats) <- c("R squared", "L squared")
rownames(R2L2_stats) <- NULL
# Table 3 input: test-set Uno's C-statistic for every fitted model,
# alongside the R-squared / L-squared columns computed above. Row order
# must match R2L2_stats (AFT models first, then the Cox models).
PA_stats <- data.frame(
  Model = c(
    "Weibull AFT, Stepwise Backward via AIC",
    "Exponential AFT, Stepwise Backward via AIC",
    "Lognormal AFT, Stepwise Backward via AIC",
    "Cox Regression Stepwise Forward Selection via AIC",
    "Cox Regression Stepwise Forward Selection via BIC",
    "Cox Regression LASSO Estimate (10-fold CV)"
  ),
  Concordance = vapply(
    list(weiAFT_sbAIC, exAFT_sbAIC, lognormAFT_sbAIC,
         rot_sfAIC, rot_sfBIC, lasso_temp),
    function(mod) {
      concordance(mod, timewt = "n/G2", newdata = rotterdam_test)$concordance
    },
    numeric(1)
  ),
  R2L2_stats
)
rownames(PA_stats) <- NULL
# Render Table 3 (bold headers/title, footnote on the concordance column)
# and export it as a PNG.
tbl3 <- PA_stats %>%
  gt() %>%
  tab_header("Table 3: Prediction Accuracy Measures on Test Data") %>%
  tab_style(
    style = cell_text(weight = "bold"),
    locations = list(cells_column_labels(), cells_title())
  ) %>%
  tab_footnote(
    footnote = "Concordance calculated as Uno's C-statistic",
    locations = cells_column_labels(columns = Concordance)
  )
gt::gtsave(tbl3, file = file.path(getwd(), "tbl3.png"))
## Registered S3 method overwritten by 'webshot2':
## method from
## print.webshot webshot
tbl3
| Table 3: Prediction Accuracy Measures on Test Data | |||
| Model | Concordance1 | R.squared | L.squared |
|---|---|---|---|
| Weibull AFT, Stepwise Backward via AIC | 0.6685019 | 0.2209 | 0.2802 |
| Exponential AFT, Stepwise Backward via AIC | 0.6686837 | 0.2143 | 0.0888 |
| Lognormal AFT, Stepwise Backward via AIC | 0.6681475 | 0.2099 | 0.0811 |
| Cox Regression Stepwise Forward Selection via AIC | 0.6681764 | 0.1998 | 0.7135 |
| Cox Regression Stepwise Forward Selection via BIC | 0.6687657 | 0.2048 | 0.7115 |
| Cox Regression LASSO Estimate (10-fold CV) | 0.6651586 | 0.1983 | 0.7182 |
| 1 Concordance calculated as Uno's C-statistic | |||
# Prediction horizons in days.
fiveyear<-5*365
tenyear<-10*365
# Register a predictSurvProb S3 method for survreg objects, delegating to
# the SurvMetrics implementation: pec::calPlot dispatches on the
# predictSurvProb generic specifically, so without this method the AFT
# fits could not be passed to calPlot.
predictSurvProb.survreg <- function(object, newdata, times, ...) {
SurvMetrics::predictSurvProb2survreg(object, newdata, times, ...)
}
# Shared calibration-plot settings: deciles (q = 10) of predicted survival,
# drawn as bars against the Kaplan-Meier estimate, on the test set by
# default. Extra arguments (e.g. time =, formula =) are forwarded to
# pec::calPlot. Returns the calPlot object (its $plotFrames component is
# used by the chi-squared calibration test below).
myCalPlotSettings <- function(model, legend.label = "Predicted survival",
newdata = rotterdam_test, ...) {
pec::calPlot(model, data = newdata, method = "quantile",
q = 10, bars=TRUE, type="survival", pseudo=FALSE,
legend.legend = c(legend.label, "Kaplan-Meier Estimate"), ...)
}
# Nam-D'Agostino-style chi-squared calibration test.
#
# For each calibration group (one row of calPlot()$plotFrames) the squared
# difference between observed and predicted survival is scaled by
# p * (1 - p) of the prediction; the scaled terms are summed and the
# statistic is referred to a chi-squared distribution on (groups - 1) df.
#
# Args:
#   data: data.frame with Model.1.Obs and Model.1.Pred columns, one row
#     per calibration group.
# Returns: named numeric vector c(TS, pvalue) (rounded to 3 / 5 digits),
#   printed as a side effect and returned invisibly via print().
ND.chisquare.test <- function(data) {
  obs <- data$Model.1.Obs
  pred <- data$Model.1.Pred
  # NOTE: terms are undefined if a predicted probability is exactly 0 or 1.
  TS <- sum((obs - pred)^2 / (pred * (1 - pred)))
  # lower.tail = FALSE is numerically more accurate than 1 - pchisq()
  # when the upper-tail probability is very small.
  pvalue <- pchisq(TS, df = nrow(data) - 1, lower.tail = FALSE)
  results <- c(TS = round(TS, 3), pvalue = round(pvalue, 5))
  print(results)
}
# Calibration plots and chi-squared calibration tests at the 5- and 10-year
# horizons for each AFT model. Each calPlot call is repeated because the
# returned object's $plotFrames component is also needed for the test.
myCalPlotSettings(weiAFT_sbAIC, "Weibull AFT Prediction", time = fiveyear)
myCalPlotSettings(weiAFT_sbAIC, "Weibull AFT Prediction", time = tenyear)
weiAFT_sbAIC.5year<-as.data.frame(myCalPlotSettings(weiAFT_sbAIC, "Weibull AFT Prediction", time = fiveyear)$plotFrames)
ND.chisquare.test(weiAFT_sbAIC.5year)
## TS pvalue
## 0.055 1.000
weiAFT_sbAIC.10year<-as.data.frame(myCalPlotSettings(weiAFT_sbAIC, "Weibull AFT Prediction", time = tenyear)$plotFrames)
ND.chisquare.test(weiAFT_sbAIC.10year)
## TS pvalue
## 0.37400 0.99999
# Exponential AFT calibration.
myCalPlotSettings(exAFT_sbAIC, "Exponential AFT Prediction", time = fiveyear)
myCalPlotSettings(exAFT_sbAIC, "Exponential AFT Prediction", time = tenyear)
exAFT_sbAIC.5year<-as.data.frame(myCalPlotSettings(exAFT_sbAIC, "Exponential AFT Prediction", time = fiveyear)$plotFrames)
ND.chisquare.test(exAFT_sbAIC.5year)
## TS pvalue
## 0.167 1.000
exAFT_sbAIC.10year<-as.data.frame(myCalPlotSettings(exAFT_sbAIC, "Exponential AFT Prediction", time = tenyear)$plotFrames)
ND.chisquare.test(exAFT_sbAIC.10year)
## TS pvalue
## 0.286 1.000
# Lognormal AFT calibration.
myCalPlotSettings(lognormAFT_sbAIC, "Lognormal AFT Prediction", time = fiveyear)
myCalPlotSettings(lognormAFT_sbAIC, "Lognormal AFT Prediction", time = tenyear)
lognormAFT_sbAIC.5year<-as.data.frame(myCalPlotSettings(lognormAFT_sbAIC, "Lognormal AFT Prediction", time = fiveyear)$plotFrames)
ND.chisquare.test(lognormAFT_sbAIC.5year)
## TS pvalue
## 0.168 1.000
lognormAFT_sbAIC.10year<-as.data.frame(myCalPlotSettings(lognormAFT_sbAIC, "Lognormal AFT Prediction", time = tenyear)$plotFrames)
ND.chisquare.test(lognormAFT_sbAIC.10year)
## TS pvalue
## 0.42400 0.99998
# Same calibration checks for the Cox-based models. For the
# coefficient-grafted LASSO model an explicit formula is forwarded to
# calPlot (the other models' formulas are recovered from the fit itself).
myCalPlotSettings(lasso_temp, "Cox LASSO Prediction",
formula = full_model_formula, time = fiveyear)
myCalPlotSettings(lasso_temp, "Cox LASSO Prediction",
formula = full_model_formula, time = tenyear)
lasso_temp.5year<-as.data.frame(myCalPlotSettings(lasso_temp, "Cox LASSO Prediction",
formula = full_model_formula, time = fiveyear)$plotFrames)
ND.chisquare.test(lasso_temp.5year)
## TS pvalue
## 0.121 1.000
lasso_temp.10year<-as.data.frame(myCalPlotSettings(lasso_temp, "Cox LASSO Prediction",
formula = full_model_formula, time = tenyear)$plotFrames)
ND.chisquare.test(lasso_temp.10year)
## TS pvalue
## 0.35200 0.99999
# Cox stepwise-forward-AIC calibration.
myCalPlotSettings(rot_sfAIC, "Cox Stepwise Forward AIC Prediction", time = fiveyear)
myCalPlotSettings(rot_sfAIC, "Cox Stepwise Forward AIC Prediction", time = tenyear)
rot_sfAIC.5year<-as.data.frame(myCalPlotSettings(rot_sfAIC, "Cox Stepwise Forward AIC Prediction", time = fiveyear)$plotFrames)
ND.chisquare.test(rot_sfAIC.5year)
## TS pvalue
## 0.118 1.000
rot_sfAIC.10year<-as.data.frame(myCalPlotSettings(rot_sfAIC, "Cox Stepwise Forward AIC Prediction", time = tenyear)$plotFrames)
ND.chisquare.test(rot_sfAIC.10year)
## TS pvalue
## 0.326 1.000
# Cox stepwise-forward-BIC calibration.
myCalPlotSettings(rot_sfBIC, "Cox Stepwise Forward BIC Prediction", time = fiveyear)
myCalPlotSettings(rot_sfBIC, "Cox Stepwise Forward BIC Prediction", time = tenyear)
rot_sfBIC.5year<-as.data.frame(myCalPlotSettings(rot_sfBIC, "Cox Stepwise Forward BIC Prediction", time = fiveyear)$plotFrames)
ND.chisquare.test(rot_sfBIC.5year)
## TS pvalue
## 0.12 1.00
rot_sfBIC.10year<-as.data.frame(myCalPlotSettings(rot_sfBIC, "Cox Stepwise Forward BIC Prediction", time = tenyear)$plotFrames)
ND.chisquare.test(rot_sfBIC.10year)
## TS pvalue
## 0.28 1.00
# Collect the 5- and 10-year calibration test results into one matrix for
# Table 4. Only the first row is named (yr5.*/yr10.*); rbind propagates
# those names to the column labels. Each call re-prints its result as a
# side effect, matching the captured output below.
a <- rbind(
  c(yr5 = ND.chisquare.test(weiAFT_sbAIC.5year),
    yr10 = ND.chisquare.test(weiAFT_sbAIC.10year)),
  c(ND.chisquare.test(exAFT_sbAIC.5year),
    ND.chisquare.test(exAFT_sbAIC.10year)),
  c(ND.chisquare.test(lognormAFT_sbAIC.5year),
    ND.chisquare.test(lognormAFT_sbAIC.10year)),
  c(ND.chisquare.test(lasso_temp.5year),
    ND.chisquare.test(lasso_temp.10year)),
  c(ND.chisquare.test(rot_sfAIC.5year),
    ND.chisquare.test(rot_sfAIC.10year)),
  c(ND.chisquare.test(rot_sfBIC.5year),
    ND.chisquare.test(rot_sfBIC.10year))
)
## TS pvalue
## 0.055 1.000
## TS pvalue
## 0.37400 0.99999
## TS pvalue
## 0.167 1.000
## TS pvalue
## 0.286 1.000
## TS pvalue
## 0.168 1.000
## TS pvalue
## 0.42400 0.99998
## TS pvalue
## 0.121 1.000
## TS pvalue
## 0.35200 0.99999
## TS pvalue
## 0.118 1.000
## TS pvalue
## 0.326 1.000
## TS pvalue
## 0.12 1.00
## TS pvalue
## 0.28 1.00
# Table 4: calibration chi-squared results, with a column spanner per
# horizon and bold headers/title.
tbl4 <- data.frame(
  cbind(
    Model = c("Weibull AFT", "Exponential AFT", "Lognormal AFT", "Cox LASSO",
              "Cox Forward AIC", "Cox Forward BIC"),
    a
  )
) %>%
  gt() %>%
  tab_spanner(label = "5 Year", columns = c(yr5.TS, yr5.pvalue)) %>%
  tab_spanner(label = "10 Year", columns = c(yr10.TS, yr10.pvalue)) %>%
  cols_label(
    yr5.TS = "Test Statistic",
    yr5.pvalue = "p-value",
    yr10.TS = "Test Statistic",
    yr10.pvalue = "p-value"
  ) %>%
  tab_header("Table 4: Calibration Chi-Squared Tests") %>%
  tab_style(
    style = cell_text(weight = "bold"),
    locations = list(cells_column_labels(), cells_title(),
                     cells_column_spanners())
  )
tbl4
| Table 4: Calibration Chi-Squared Tests | ||||
| Model | 5 Year | 10 Year | ||
|---|---|---|---|---|
| Test Statistic | p-value | Test Statistic | p-value | |
| Weibull AFT | 0.055 | 1 | 0.374 | 0.99999 |
| Exponential AFT | 0.167 | 1 | 0.286 | 1 |
| Lognormal AFT | 0.168 | 1 | 0.424 | 0.99998 |
| Cox LASSO | 0.121 | 1 | 0.352 | 0.99999 |
| Cox Forward AIC | 0.118 | 1 | 0.326 | 1 |
| Cox Forward BIC | 0.12 | 1 | 0.28 | 1 |
# Export Table 4 as a PNG.
gt::gtsave(tbl4, file = file.path(getwd(), "tbl4.png"))
# Risk-score diagnostics on the test set: plot each model's linear
# predictor against follow-up time, colored by censoring status. AFT
# linear predictors are negated so larger scores correspond to higher risk.
rotterdam_test$censor<-ifelse(rotterdam_test$death==1,"observed","censored")
rotterdam_test$RS.weiAFT_sbAIC<-predict(weiAFT_sbAIC,newdata=rotterdam_test,type="lp")
ggplot(rotterdam_test,aes(x=RS.weiAFT_sbAIC*(-1),y=dtime,color=censor)) +
geom_point() + theme_bw() + geom_smooth(color="black",method="lm") +
xlab("Risk Score") + ylab("Follow-Up Time") + ggtitle("Weibull AFT Prediction")
## `geom_smooth()` using formula = 'y ~ x'
ggsave("weibull_riskscore.png")
## Saving 7 x 5 in image
## `geom_smooth()` using formula = 'y ~ x'
# Exponential AFT risk score (negated linear predictor).
rotterdam_test$RS.exAFT_sbAIC<-predict(exAFT_sbAIC,newdata=rotterdam_test,type="lp")
ggplot(rotterdam_test,aes(x=RS.exAFT_sbAIC*(-1),y=dtime,color=censor)) +
geom_point() + theme_bw() + geom_smooth(color="black",method="lm") +
xlab("Risk Score") + ylab("Follow-Up Time") + ggtitle("Exponential AFT Prediction")
## `geom_smooth()` using formula = 'y ~ x'
# Lognormal AFT risk score (negated linear predictor).
rotterdam_test$RS.lognormAFT_sbAIC<-predict(lognormAFT_sbAIC,newdata=rotterdam_test,type="lp")
ggplot(rotterdam_test,aes(x=RS.lognormAFT_sbAIC*(-1),y=dtime,color=censor)) +
geom_point() + theme_bw() + geom_smooth(color="black",method="lm") +
xlab("Risk Score") + ylab("Follow-Up Time") + ggtitle("Lognormal AFT Prediction")
## `geom_smooth()` using formula = 'y ~ x'
# Cox LASSO risk score (linear predictor used as-is: higher = higher risk).
rotterdam_test$RS.lasso_temp<-predict(lasso_temp,newdata=rotterdam_test,type="lp")
ggplot(rotterdam_test,aes(x=RS.lasso_temp,y=dtime,color=censor)) +
geom_point() + theme_bw() + geom_smooth(color="black",method="lm") +
xlab("Risk Score") + ylab("Follow-Up Time") + ggtitle("Cox LASSO Prediction")
## `geom_smooth()` using formula = 'y ~ x'
ggsave("coxlasso_riskscore.png")
## Saving 7 x 5 in image
## `geom_smooth()` using formula = 'y ~ x'
# Cox stepwise-forward-AIC risk score.
rotterdam_test$RS.rot_sfAIC<-predict(rot_sfAIC,newdata=rotterdam_test,type="lp")
ggplot(rotterdam_test,aes(x=RS.rot_sfAIC,y=dtime,color=censor)) +
geom_point() + theme_bw() + geom_smooth(color="black",method="lm") +
xlab("Risk Score") + ylab("Follow-Up Time") + ggtitle("Cox Stepwise Forward AIC Prediction")
## `geom_smooth()` using formula = 'y ~ x'
# Cox stepwise-forward-BIC risk score.
rotterdam_test$RS.rot_sfBIC<-predict(rot_sfBIC,newdata=rotterdam_test,type="lp")
ggplot(rotterdam_test,aes(x=RS.rot_sfBIC,y=dtime,color=censor)) +
geom_point() + theme_bw() + geom_smooth(color="black",method="lm") +
xlab("Risk Score") + ylab("Follow-Up Time") + ggtitle("Cox Stepwise Forward BIC Prediction")
## `geom_smooth()` using formula = 'y ~ x'